From 9d23c1390fc9701565b464a2d6989dcfea443e3a Mon Sep 17 00:00:00 2001 From: Vallish Pai Date: Thu, 19 Dec 2024 11:43:15 +0530 Subject: [PATCH 01/82] [Enhancement] (nereids)implement adminSetTableStatusCommand in nereids (#45272) Issue Number: close #42851 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../nereids/parser/LogicalPlanBuilder.java | 10 ++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../commands/AdminSetTableStatusCommand.java | 98 +++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 + .../test_nereids_admin_set_tbl_status.groovy | 49 ++++++++++ 6 files changed, 164 insertions(+), 1 deletion(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AdminSetTableStatusCommand.java create mode 100644 regression-test/suites/nereids_p0/ddl/admin/test_nereids_admin_set_tbl_status.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 6c37a2b276b0dd..0cd32f3820fda4 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -508,6 +508,7 @@ supportedAdminStatement | ADMIN CLEAN TRASH (ON LEFT_PAREN backends+=STRING_LITERAL (COMMA backends+=STRING_LITERAL)* RIGHT_PAREN)? #adminCleanTrash + | ADMIN SET TABLE name=multipartIdentifier STATUS properties=propertyClause? #adminSetTableStatus ; supportedRecoverStatement @@ -528,7 +529,6 @@ unsupportedAdminStatement | ADMIN SET TABLE name=multipartIdentifier PARTITION VERSION properties=propertyClause? #adminSetPartitionVersion | ADMIN COPY TABLET tabletId=INTEGER_VALUE properties=propertyClause? #adminCopyTablet - | ADMIN SET TABLE name=multipartIdentifier STATUS properties=propertyClause? 
#adminSetTableStatus ; baseTableRef diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index a9ce9215d4d9be..68226f156c7f25 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -55,6 +55,7 @@ import org.apache.doris.nereids.DorisParser.AdminCompactTableContext; import org.apache.doris.nereids.DorisParser.AdminDiagnoseTabletContext; import org.apache.doris.nereids.DorisParser.AdminRebalanceDiskContext; +import org.apache.doris.nereids.DorisParser.AdminSetTableStatusContext; import org.apache.doris.nereids.DorisParser.AdminShowReplicaDistributionContext; import org.apache.doris.nereids.DorisParser.AdminShowReplicaStatusContext; import org.apache.doris.nereids.DorisParser.AdminShowTabletStorageFormatContext; @@ -491,6 +492,7 @@ import org.apache.doris.nereids.trees.plans.commands.AdminCleanTrashCommand; import org.apache.doris.nereids.trees.plans.commands.AdminCompactTableCommand; import org.apache.doris.nereids.trees.plans.commands.AdminRebalanceDiskCommand; +import org.apache.doris.nereids.trees.plans.commands.AdminSetTableStatusCommand; import org.apache.doris.nereids.trees.plans.commands.AdminShowReplicaStatusCommand; import org.apache.doris.nereids.trees.plans.commands.AlterCatalogCommentCommand; import org.apache.doris.nereids.trees.plans.commands.AlterMTMVCommand; @@ -4844,6 +4846,14 @@ public LogicalPlan visitShowCharset(ShowCharsetContext ctx) { return new ShowCharsetCommand(); } + @Override + public LogicalPlan visitAdminSetTableStatus(AdminSetTableStatusContext ctx) { + List dbTblNameParts = visitMultipartIdentifier(ctx.name); + Map properties = ctx.propertyClause() != null + ? 
Maps.newHashMap(visitPropertyClause(ctx.propertyClause())) : Maps.newHashMap(); + return new AdminSetTableStatusCommand(new TableNameInfo(dbTblNameParts), properties); + } + @Override public LogicalPlan visitShowFrontends(ShowFrontendsContext ctx) { String detail = (ctx.name != null) ? ctx.name.getText() : null; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index bfa0163e7d267e..6395f429db29de 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -201,6 +201,7 @@ public enum PlanType { DROP_USER_COMMAND, DROP_WORKLOAD_GROUP_NAME, DROP_WORKLOAD_POLICY_COMMAND, + ADMIN_SET_TABLE_STATUS_COMMAND, ALTER_CATALOG_COMMENT_COMMAND, ALTER_SQL_BLOCK_RULE_COMMAND, SHOW_BACKENDS_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AdminSetTableStatusCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AdminSetTableStatusCommand.java new file mode 100644 index 00000000000000..36c46c333468c7 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AdminSetTableStatusCommand.java @@ -0,0 +1,98 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.OlapTable.OlapTableState; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.common.UserException; +import org.apache.doris.common.util.Util; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.commands.info.TableNameInfo; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.StmtExecutor; + +import java.util.Map; + +/** + * AdminSetTableStatusCommand + */ +public class AdminSetTableStatusCommand extends Command implements ForwardWithSync { + public static final String TABLE_STATE = "state"; + private final TableNameInfo tableNameInfo; + private final Map properties; + private OlapTableState tableState; + + /** + * constructor + */ + public AdminSetTableStatusCommand(TableNameInfo tableNameInfo, Map properties) { + super(PlanType.ADMIN_SET_TABLE_STATUS_COMMAND); + this.tableNameInfo = tableNameInfo; + this.properties = properties; + } + + @Override + public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { + validate(ctx); + Env.getCurrentEnv().setTableStatusInternal(tableNameInfo.getDb(), tableNameInfo.getTbl(), tableState, false); + } + + private void validate(ConnectContext ctx) throws UserException { 
+ // check auth + if (!Env.getCurrentEnv().getAccessManager().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "ADMIN"); + } + + tableNameInfo.analyze(ctx); + Util.prohibitExternalCatalog(tableNameInfo.getCtl(), this.getClass().getSimpleName()); + + checkProperties(); + } + + private void checkProperties() throws AnalysisException { + for (Map.Entry entry : properties.entrySet()) { + String key = entry.getKey(); + String val = entry.getValue(); + + if (key.equalsIgnoreCase(TABLE_STATE)) { + try { + tableState = OlapTableState.valueOf(val.toUpperCase()); + } catch (IllegalArgumentException e) { + throw new AnalysisException("Invalid table state: " + val); + } + } else { + throw new AnalysisException("Unsupported property: " + key); + } + } + + if (tableState == null) { + throw new AnalysisException("Should add properties: STATE."); + } + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitAdminSetTableStatusCommand(this, context); + } + +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index 6efa58c1b8eb91..a9340894c33590 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -23,6 +23,7 @@ import org.apache.doris.nereids.trees.plans.commands.AdminCleanTrashCommand; import org.apache.doris.nereids.trees.plans.commands.AdminCompactTableCommand; import org.apache.doris.nereids.trees.plans.commands.AdminRebalanceDiskCommand; +import org.apache.doris.nereids.trees.plans.commands.AdminSetTableStatusCommand; import org.apache.doris.nereids.trees.plans.commands.AdminShowReplicaStatusCommand; import 
org.apache.doris.nereids.trees.plans.commands.AlterCatalogCommentCommand; import org.apache.doris.nereids.trees.plans.commands.AlterJobStatusCommand; @@ -236,6 +237,10 @@ default R visitAdminCleanTrashCommand(AdminCleanTrashCommand adminCleanTrashComm return visitCommand(adminCleanTrashCommand, context); } + default R visitAdminSetTableStatusCommand(AdminSetTableStatusCommand cmd, C context) { + return visitCommand(cmd, context); + } + default R visitDropConstraintCommand(DropConstraintCommand dropConstraintCommand, C context) { return visitCommand(dropConstraintCommand, context); } diff --git a/regression-test/suites/nereids_p0/ddl/admin/test_nereids_admin_set_tbl_status.groovy b/regression-test/suites/nereids_p0/ddl/admin/test_nereids_admin_set_tbl_status.groovy new file mode 100644 index 00000000000000..8e2a8b67aaed0f --- /dev/null +++ b/regression-test/suites/nereids_p0/ddl/admin/test_nereids_admin_set_tbl_status.groovy @@ -0,0 +1,49 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_nereids_admin_set_tbl_status") { + def tbName1 = "test_nereids_admin_set_tbl_status" + + try { + sql "DROP TABLE IF EXISTS ${tbName1}" + sql """ + CREATE TABLE IF NOT EXISTS ${tbName1} ( + k1 INT, + v1 INT, + v2 INT + ) + DUPLICATE KEY (k1) + DISTRIBUTED BY HASH(k1) BUCKETS 1 properties("replication_num" = "1", "light_schema_change" = "false", "disable_auto_compaction" = "true"); + """ + + // set table state to ROLLUP + checkNereidsExecute("ADMIN SET TABLE ${tbName1} STATUS PROPERTIES ('state' = 'rollup');") + // try alter table comment + test { + sql """ ALTER TABLE ${tbName1} MODIFY COMMENT 'test'; """ + exception "Table[test_nereids_admin_set_tbl_status]'s state(ROLLUP) is not NORMAL. Do not allow doing ALTER ops" + } + + // set table state to NORMAL + checkNereidsExecute("ADMIN SET TABLE ${tbName1} STATUS PROPERTIES ('state' = 'normal');") + // try alter table comment + sql """ ALTER TABLE ${tbName1} MODIFY COMMENT 'test'; """ + } finally { + // drop table + sql """ DROP TABLE ${tbName1} force""" + } +} From b124c560f78320ba8982698c06a07e5af6c50cd4 Mon Sep 17 00:00:00 2001 From: feiniaofeiafei Date: Thu, 19 Dec 2024 15:11:02 +0800 Subject: [PATCH 02/82] [fix](nereids) fix ExtractAndNormalizeWindowExpression bug (#45553) Problem Summary: In ExtractAndNormalizeWindowExpression, some expressions are pushed down and output by the bottom LogicalProject. If a window expression depends on these pushed-down expressions, those parts should be replaced accordingly. However, when a Literal is pushed down, it should not be used as a replacement. Example: For the window expression: last_value(c1, false) over() If the expression false is pushed down as alias1, the window expression would incorrectly be replaced as: last_value(c1, alias1) over() This PR fixes the issue by ensuring Literals are not replaced when pushed down. 
--- .../ExtractAndNormalizeWindowExpression.java | 3 +- .../normalize_window_nullable_agg_test.out | 4 ++ .../normalize_window_nullable_agg_test.groovy | 57 +++++++++++++++++++ 3 files changed, 63 insertions(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/ExtractAndNormalizeWindowExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/ExtractAndNormalizeWindowExpression.java index 2cfe4523003879..a74ebe4b76b9cf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/ExtractAndNormalizeWindowExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/ExtractAndNormalizeWindowExpression.java @@ -27,6 +27,7 @@ import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.WindowExpression; import org.apache.doris.nereids.trees.expressions.functions.agg.NullableAggregateFunction; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.logical.LogicalProject; import org.apache.doris.nereids.trees.plans.logical.LogicalWindow; @@ -117,7 +118,7 @@ private Plan normalize(LogicalProject project) { // we need replace alias's child expr with corresponding alias's slot in output // so create a customNormalizeMap alias's child -> alias.toSlot to do it Map customNormalizeMap = toBePushedDown.stream() - .filter(expr -> expr instanceof Alias) + .filter(expr -> expr instanceof Alias && !(expr.child(0) instanceof Literal)) .collect(Collectors.toMap(expr -> ((Alias) expr).child(), expr -> ((Alias) expr).toSlot(), (oldExpr, newExpr) -> oldExpr)); diff --git a/regression-test/data/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.out b/regression-test/data/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.out index 2df25bb0d3bed2..f910333a57e176 100644 --- 
a/regression-test/data/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.out +++ b/regression-test/data/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.out @@ -291,3 +291,7 @@ false false false +-- !fold_window -- +0 false 0 be +9999-12-31 23:59:59 false 9999-12-31 23:59:59 b + diff --git a/regression-test/suites/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.groovy b/regression-test/suites/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.groovy index 9617e8bb19cd95..df3fd63d750742 100644 --- a/regression-test/suites/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.groovy +++ b/regression-test/suites/nereids_rules_p0/normalize_window/normalize_window_nullable_agg_test.groovy @@ -72,4 +72,61 @@ suite("normalize_window_nullable_agg") { sql "select group_concat(xwho order by xwhat) over(partition by xwhen) from windowfunnel_test_normalize_window;" exception "order by is not supported" } + + sql "set enable_fold_constant_by_be = 1;" + sql "drop table if exists fold_window1" + sql """create table fold_window1 ( + pk int, + col_char_255__undef_signed char(255) null , + col_char_100__undef_signed char(100) null , + col_varchar_255__undef_signed varchar(255) null , + col_char_255__undef_signed_not_null char(255) not null , + col_char_100__undef_signed_not_null char(100) not null , + col_varchar_255__undef_signed_not_null varchar(255) not null , + col_varchar_1000__undef_signed varchar(1000) null , + col_varchar_1000__undef_signed_not_null varchar(1000) not null , + col_varchar_1001__undef_signed varchar(1001) null , + col_varchar_1001__undef_signed_not_null varchar(1001) not null , + col_string_undef_signed string null , + col_string_undef_signed_not_null string not null + ) engine=olap + DUPLICATE KEY(pk, col_char_255__undef_signed, col_char_100__undef_signed, col_varchar_255__undef_signed) + distributed by hash(pk) buckets 10 + properties("bloom_filter_columns" = 
"col_char_255__undef_signed, col_char_100__undef_signed, col_varchar_255__undef_signed ", "replication_num" = "1");""" + sql """insert into fold_window1(pk,col_char_255__undef_signed,col_char_255__undef_signed_not_null,col_char_100__undef_signed + ,col_char_100__undef_signed_not_null,col_varchar_255__undef_signed,col_varchar_255__undef_signed_not_null,col_varchar_1000__undef_signed,col_varchar_1000__undef_signed_not_null + ,col_varchar_1001__undef_signed,col_varchar_1001__undef_signed_not_null,col_string_undef_signed,col_string_undef_signed_not_null) + values (0,'like','9999-12-31 23:59:59','9999-12-31 23:59:59','c','20240803','2024-08-03 13:08:30','300.343','2024-07-01','that''s','9999-12-31 23:59:59','s','b'), + (1,'be','g','f','not','20240803','20240803','2024-08-03 13:08:30','g','20240803','0','2024-07-01','be')""" + + sql "drop table if exists fold_window2" + sql """create table fold_window2 ( + pk int, + col_char_255__undef_signed char(255) null , + col_char_255__undef_signed_not_null char(255) not null , + col_char_100__undef_signed char(100) null , + col_char_100__undef_signed_not_null char(100) not null , + col_varchar_255__undef_signed varchar(255) null , + col_varchar_255__undef_signed_not_null varchar(255) not null , + col_varchar_1000__undef_signed varchar(1000) null , + col_varchar_1000__undef_signed_not_null varchar(1000) not null , + col_varchar_1001__undef_signed varchar(1001) null , + col_varchar_1001__undef_signed_not_null varchar(1001) not null , + col_string_undef_signed string null , + col_string_undef_signed_not_null string not null + ) engine=olap + DUPLICATE KEY(pk) + distributed by hash(pk) buckets 10 + properties ("bloom_filter_columns" = "col_char_255__undef_signed, col_char_100__undef_signed, col_varchar_255__undef_signed ", "replication_num" = "1");""" + sql """insert into fold_window2(pk,col_char_255__undef_signed,col_char_255__undef_signed_not_null,col_char_100__undef_signed + 
,col_char_100__undef_signed_not_null,col_varchar_255__undef_signed,col_varchar_255__undef_signed_not_null,col_varchar_1000__undef_signed + ,col_varchar_1000__undef_signed_not_null,col_varchar_1001__undef_signed,col_varchar_1001__undef_signed_not_null,col_string_undef_signed,col_string_undef_signed_not_null) + values (0,'some','2024-07-01','9999-12-31 23:59:59','9999-12-31 23:59:59','9999-12-31 23:59:59','300.343','2024-07-01','1','1','2024-07-01','2024-08-03 13:08:30','2024-08-03 13:08:30');""" + + qt_fold_window """ + select initcap(col_varchar_1001__undef_signed_not_null) col_alias97650 , starts_with('ourBZbRijD', "e") AS col_alias97651 , + col_varchar_1001__undef_signed_not_null AS col_alias97652 , LAST_VALUE(col_string_undef_signed_not_null , false) over ( order by pk ) + AS col_alias97653 from fold_window1 where 'DCOFMrybqf' <> (select min ( col_char_255__undef_signed ) + from fold_window2) ORDER BY col_alias97650,col_alias97651,col_alias97652,col_alias97653 ; + """ } \ No newline at end of file From 2a7c2e3f410a715ff9c22890de06d809fa411c52 Mon Sep 17 00:00:00 2001 From: yujun Date: Thu, 19 Dec 2024 15:47:50 +0800 Subject: [PATCH 03/82] [fix](create table) fix create table fail msg (#45623) --- .../org/apache/doris/clone/TabletScheduler.java | 2 +- .../java/org/apache/doris/system/Backend.java | 3 +++ .../apache/doris/system/SystemInfoService.java | 2 +- .../apache/doris/catalog/CreateTableTest.java | 17 +++++++++++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java index 504e1d36a65af3..1545236aa59cd0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java +++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java @@ -1545,7 +1545,7 @@ private RootPathLoadStatistic doChooseAvailableDestPath(TabletSchedCtx tabletCtx !allFitPathsSameMedium.isEmpty() ? 
allFitPathsSameMedium : allFitPathsDiffMedium; if (allFitPaths.isEmpty()) { List backendsInfo = Env.getCurrentSystemInfo().getAllClusterBackendsNoException().values().stream() - .filter(be -> be.getLocationTag() == tag) + .filter(be -> be.getLocationTag().equals(tag)) .map(Backend::getDetailsForCreateReplica) .collect(Collectors.toList()); throw new SchedException(Status.UNRECOVERABLE, String.format("unable to find dest path for new replica" diff --git a/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java b/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java index 974c0e0cae13a7..c864e1ba2ae0ba 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java +++ b/fe/fe-core/src/main/java/org/apache/doris/system/Backend.java @@ -28,6 +28,7 @@ import org.apache.doris.common.util.PrintableMap; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.persist.gson.GsonUtils; +import org.apache.doris.qe.SimpleScheduler; import org.apache.doris.resource.Tag; import org.apache.doris.system.HeartbeatResponse.HbStatus; import org.apache.doris.thrift.TDisk; @@ -340,6 +341,8 @@ public String getDetailsForCreateReplica() { sb.append(", isDecommissioned=true, exclude it"); } else if (isComputeNode()) { sb.append(", isComputeNode=true, exclude it"); + } else if (!Config.disable_backend_black_list && !SimpleScheduler.isAvailable(this)) { + sb.append(", is in black list, exclude it"); } else { sb.append(", hdd disks count={"); if (hddOk > 0) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java b/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java index 447dc3457aefe1..f7359293621e3d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java +++ b/fe/fe-core/src/main/java/org/apache/doris/system/SystemInfoService.java @@ -567,7 +567,7 @@ public String getDetailsForCreateReplica(ReplicaAllocation replicaAlloc) { StringBuilder sb = new StringBuilder(" Backends 
details: "); for (Tag tag : replicaAlloc.getAllocMap().keySet()) { sb.append("backends with tag ").append(tag).append(" is "); - sb.append(idToBackendRef.values().stream().filter(be -> be.getLocationTag() == tag) + sb.append(idToBackendRef.values().stream().filter(be -> be.getLocationTag().equals(tag)) .map(Backend::getDetailsForCreateReplica) .collect(Collectors.toList())); sb.append(", "); diff --git a/fe/fe-core/src/test/java/org/apache/doris/catalog/CreateTableTest.java b/fe/fe-core/src/test/java/org/apache/doris/catalog/CreateTableTest.java index 50e9ac40bc7d93..76acb4ad76e196 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/catalog/CreateTableTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/catalog/CreateTableTest.java @@ -25,12 +25,15 @@ import org.apache.doris.common.ExceptionChecker; import org.apache.doris.common.FeConstants; import org.apache.doris.common.UserException; +import org.apache.doris.resource.Tag; import org.apache.doris.utframe.TestWithFeService; +import com.google.common.collect.Maps; import org.junit.Assert; import org.junit.jupiter.api.Test; import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.UUID; @@ -789,6 +792,20 @@ public void testCreateTableWithForceReplica() throws DdlException { } } + @Test + public void testCreateTableDetailMsg() throws Exception { + Map allocMap = Maps.newHashMap(); + allocMap.put(Tag.create(Tag.TYPE_LOCATION, "group_a"), (short) 6); + Assert.assertEquals(" Backends details: backends with tag {\"location\" : \"group_a\"} is [], ", + Env.getCurrentSystemInfo().getDetailsForCreateReplica(new ReplicaAllocation(allocMap))); + + allocMap.clear(); + allocMap.put(Tag.create(Tag.TYPE_LOCATION, new String(Tag.VALUE_DEFAULT_TAG)), (short) 6); + String msg = Env.getCurrentSystemInfo().getDetailsForCreateReplica(new ReplicaAllocation(allocMap)); + Assert.assertTrue("msg: " + msg, msg.contains("Backends details: backends with tag {\"location\" : \"default\"} is [[backendId=") 
+ && msg.contains("hdd disks count={ok=1,}, ssd disk count={}], [backendId=")); + } + @Test public void testCreateTableWithMinLoadReplicaNum() throws Exception { ExceptionChecker.expectThrowsNoException( From a3279a260441692cd0494c0eb8a93e5f97aa2945 Mon Sep 17 00:00:00 2001 From: Mryange Date: Thu, 19 Dec 2024 17:04:03 +0800 Subject: [PATCH 04/82] [refine](exchange) Use is_merge from FE for judgment instead of relying on the operator in BE. (#45592) ### What problem does this PR solve? Previously, determining whether the receiver is a merge exchange relied on checking if the specific operator was a sort node. However, this approach is incorrect because there are many types of sort operators: regular sort, partitioned sort, and spill sort. --- be/src/pipeline/exec/exchange_sink_operator.cpp | 10 ++-------- be/src/pipeline/exec/exchange_sink_operator.h | 4 +++- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/be/src/pipeline/exec/exchange_sink_operator.cpp b/be/src/pipeline/exec/exchange_sink_operator.cpp index e7fed76be8fa16..cc789f6e25b20b 100644 --- a/be/src/pipeline/exec/exchange_sink_operator.cpp +++ b/be/src/pipeline/exec/exchange_sink_operator.cpp @@ -32,7 +32,6 @@ #include "pipeline/exec/operator.h" #include "pipeline/exec/sort_source_operator.h" #include "pipeline/local_exchange/local_exchange_sink_operator.h" -#include "pipeline/local_exchange/local_exchange_source_operator.h" #include "pipeline/pipeline_fragment_context.h" #include "util/runtime_profile.h" #include "util/uid_util.h" @@ -279,6 +278,7 @@ ExchangeSinkOperatorX::ExchangeSinkOperatorX( _tablet_sink_txn_id(sink.tablet_sink_txn_id), _t_tablet_sink_exprs(&sink.tablet_sink_exprs), _enable_local_merge_sort(state->enable_local_merge_sort()), + _dest_is_merge(sink.__isset.is_merge && sink.is_merge), _fragment_instance_ids(fragment_instance_ids) { DCHECK_GT(destinations.size(), 0); DCHECK(sink.output_partition.type == TPartitionType::UNPARTITIONED || @@ -571,19 +571,13 @@ std::shared_ptr 
ExchangeSinkOperatorX::_create_buffer( // Therefore, a shared sink buffer is used here to limit the number of concurrent RPCs. // (Note: This does not reduce the total number of RPCs.) // In a merge sort scenario, there are only n RPCs, so a shared sink buffer is not needed. -/// TODO: Modify this to let FE handle the judgment instead of BE. std::shared_ptr ExchangeSinkOperatorX::get_sink_buffer( InstanceLoId sender_ins_id) { - if (!_child) { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "ExchangeSinkOperatorX did not correctly set the child."); - } // When the child is SortSourceOperatorX or LocalExchangeSourceOperatorX, // it is an order-by scenario. // In this case, there is only one target instance, and no n * n RPC concurrency will occur. // Therefore, sharing a sink buffer is not necessary. - if (std::dynamic_pointer_cast(_child) || - std::dynamic_pointer_cast(_child)) { + if (_dest_is_merge) { return _create_buffer({sender_ins_id}); } if (_state->enable_shared_exchange_sink_buffer()) { diff --git a/be/src/pipeline/exec/exchange_sink_operator.h b/be/src/pipeline/exec/exchange_sink_operator.h index 85575beb9f7e47..3d6eeb4b39e94f 100644 --- a/be/src/pipeline/exec/exchange_sink_operator.h +++ b/be/src/pipeline/exec/exchange_sink_operator.h @@ -205,7 +205,6 @@ class ExchangeSinkOperatorX final : public DataSinkOperatorX get_sink_buffer(InstanceLoId sender_ins_id); vectorized::VExprContextSPtrs& tablet_sink_expr_ctxs() { return _tablet_sink_expr_ctxs; } @@ -260,6 +259,9 @@ class ExchangeSinkOperatorX final : public DataSinkOperatorX& _fragment_instance_ids; }; From a3de177da0799b93c02a173c659800fde7f5278d Mon Sep 17 00:00:00 2001 From: bobhan1 Date: Thu, 19 Dec 2024 17:17:28 +0800 Subject: [PATCH 05/82] [fix](cloud) Adjust rowset state check in `CloudTablet::create_transient_rowset_writer` (#45496) https://github.com/apache/doris/pull/32257 checks if the current rowset state is `BEGIN_PARTIAL_UPDATE` in `CloudTablet::create_transient_rowset_writer`. 
But if this is a retry calculate task, the rowset's state may have been changed to `COMMITTED` in the first try. This PR adjust this check to avoid DCHECK fails. --- be/src/cloud/cloud_tablet.cpp | 23 ++-- .../test_cloud_mow_partial_update_retry.out | 16 +++ ...test_cloud_mow_partial_update_retry.groovy | 100 ++++++++++++++++++ 3 files changed, 131 insertions(+), 8 deletions(-) create mode 100644 regression-test/data/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.out create mode 100644 regression-test/suites/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.groovy diff --git a/be/src/cloud/cloud_tablet.cpp b/be/src/cloud/cloud_tablet.cpp index 93c7128756738c..c7d3170726b2d5 100644 --- a/be/src/cloud/cloud_tablet.cpp +++ b/be/src/cloud/cloud_tablet.cpp @@ -54,6 +54,7 @@ namespace doris { using namespace ErrorCode; static constexpr int COMPACTION_DELETE_BITMAP_LOCK_ID = -1; +static constexpr int LOAD_INITIATOR_ID = -1; CloudTablet::CloudTablet(CloudStorageEngine& engine, TabletMetaSharedPtr tablet_meta) : BaseTablet(std::move(tablet_meta)), _engine(engine) {} @@ -504,13 +505,19 @@ Result> CloudTablet::create_rowset_writer( Result> CloudTablet::create_transient_rowset_writer( const Rowset& rowset, std::shared_ptr partial_update_info, int64_t txn_expiration) { - if (rowset.rowset_meta()->rowset_state() != RowsetStatePB::BEGIN_PARTIAL_UPDATE) [[unlikely]] { - // May cause the segment files generated by the transient rowset writer unable to be - // recycled, see `CloudRowsetWriter::build` for detail. 
- LOG(WARNING) << "Wrong rowset state: " << rowset.rowset_meta()->rowset_state(); - DCHECK(false) << rowset.rowset_meta()->rowset_state(); + if (rowset.rowset_meta_state() != RowsetStatePB::BEGIN_PARTIAL_UPDATE && + rowset.rowset_meta_state() != RowsetStatePB::COMMITTED) [[unlikely]] { + auto msg = fmt::format( + "wrong rowset state when create_transient_rowset_writer, rowset state should be " + "BEGIN_PARTIAL_UPDATE or COMMITTED, but found {}, rowset_id={}, tablet_id={}", + RowsetStatePB_Name(rowset.rowset_meta_state()), rowset.rowset_id().to_string(), + tablet_id()); + // see `CloudRowsetWriter::build` for detail. + // if this is in a retry task, the rowset state may have been changed to RowsetStatePB::COMMITTED + // in `RowsetMeta::merge_rowset_meta()` in previous trials. + LOG(WARNING) << msg; + DCHECK(false) << msg; } - RowsetWriterContext context; context.rowset_state = PREPARED; context.segments_overlap = OVERLAPPING; @@ -719,8 +726,8 @@ Status CloudTablet::save_delete_bitmap(const TabletTxnInfo* txn_info, int64_t tx } auto ms_lock_id = lock_id == -1 ? 
txn_id : lock_id; - RETURN_IF_ERROR(_engine.meta_mgr().update_delete_bitmap( - *this, ms_lock_id, COMPACTION_DELETE_BITMAP_LOCK_ID, new_delete_bitmap.get())); + RETURN_IF_ERROR(_engine.meta_mgr().update_delete_bitmap(*this, ms_lock_id, LOAD_INITIATOR_ID, + new_delete_bitmap.get())); // store the delete bitmap with sentinel marks in txn_delete_bitmap_cache because if the txn is retried for some reason, // it will use the delete bitmap from txn_delete_bitmap_cache when re-calculating the delete bitmap, during which it will do diff --git a/regression-test/data/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.out b/regression-test/data/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.out new file mode 100644 index 00000000000000..3b24419bdc6fc1 --- /dev/null +++ b/regression-test/data/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.out @@ -0,0 +1,16 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !sql -- +1 1 1 1 +2 2 2 2 +3 3 3 2 + +-- !sql -- +1 1 888 1 +2 2 777 2 +3 3 3 2 + +-- !sql -- +1 999 888 1 +2 666 777 2 +3 3 3 2 + diff --git a/regression-test/suites/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.groovy b/regression-test/suites/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.groovy new file mode 100644 index 00000000000000..13abaf1ffcabd3 --- /dev/null +++ b/regression-test/suites/fault_injection_p0/cloud/test_cloud_mow_partial_update_retry.groovy @@ -0,0 +1,100 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_cloud_mow_partial_update_retry", "nonConcurrent") { + if (!isCloudMode()) { + return + } + + GetDebugPoint().clearDebugPointsForAllFEs() + GetDebugPoint().clearDebugPointsForAllBEs() + + def customFeConfig = [ + delete_bitmap_lock_expiration_seconds : 10, + calculate_delete_bitmap_task_timeout_seconds : 15, + ] + + setFeConfigTemporary(customFeConfig) { + + def table1 = "test_cloud_mow_partial_update_retry" + sql "DROP TABLE IF EXISTS ${table1} FORCE;" + sql """ CREATE TABLE IF NOT EXISTS ${table1} ( + `k1` int NOT NULL, + `c1` int, + `c2` int, + `c3` int + )UNIQUE KEY(k1) + DISTRIBUTED BY HASH(k1) BUCKETS 1 + PROPERTIES ( + "enable_unique_key_merge_on_write" = "true", + "disable_auto_compaction" = "true", + "replication_num" = "1"); """ + + sql "insert into ${table1} values(1,1,1,1);" + sql "insert into ${table1} values(2,2,2,2);" + sql "insert into ${table1} values(3,3,3,2);" + sql "sync;" + qt_sql "select * from ${table1} order by k1;" + + try { + // block the first load + GetDebugPoint().enableDebugPointForAllBEs("BaseTablet::update_delete_bitmap.enable_spin_wait", [token: "token1"]) + GetDebugPoint().enableDebugPointForAllBEs("BaseTablet::update_delete_bitmap.block", [wait_token: "token1"]) + + // the first load + t1 = Thread.start { + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + sql "insert into ${table1}(k1,c1) values(1,999),(2,666);" + } + + // wait util the first partial update load's delete bitmap update lock expired + // to ensure that the second load can take the delete bitmap update lock + // 
Config.delete_bitmap_lock_expiration_seconds = 10s + Thread.sleep(11 * 1000) + + // the second load + GetDebugPoint().enableDebugPointForAllBEs("BaseTablet::update_delete_bitmap.enable_spin_wait", [token: "token2"]) + Thread.sleep(200) + + sql "set enable_unique_key_partial_update=true;" + sql "sync;" + sql "insert into ${table1}(k1,c2) values(1,888),(2,777);" + + qt_sql "select * from ${table1} order by k1;" + + + // keep waiting util the delete bitmap calculation timeout(Config.calculate_delete_bitmap_task_timeout_seconds = 15s) + // and the first load will retry the calculation of delete bitmap + Thread.sleep(15 * 1000) + + // let the first partial update load finish + GetDebugPoint().enableDebugPointForAllBEs("BaseTablet::update_delete_bitmap.block") + t1.join() + + Thread.sleep(1000) + + qt_sql "select * from ${table1} order by k1;" + + } catch(Exception e) { + logger.info(e.getMessage()) + throw e + } finally { + GetDebugPoint().clearDebugPointsForAllBEs() + } + } +} From 0e2de34bdc8e55382526660cdc9d1e8eed9b8033 Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Thu, 19 Dec 2024 15:00:55 +0530 Subject: [PATCH 06/82] [Enhancement] (nereids)implement DropCatalogCommand in nereids (#45372) Issue Number: close #42613 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../apache/doris/datasource/CatalogMgr.java | 22 ++++-- .../nereids/parser/LogicalPlanBuilder.java | 9 +++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../plans/commands/DropCatalogCommand.java | 77 +++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 ++ .../nereids_p0/test_drop_catalog_command.out | 4 + .../test_drop_catalog_command.groovy | 43 +++++++++++ 8 files changed, 155 insertions(+), 8 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropCatalogCommand.java create mode 100644 regression-test/data/nereids_p0/test_drop_catalog_command.out create mode 100644 
regression-test/suites/nereids_p0/test_drop_catalog_command.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 0cd32f3820fda4..b2e3eca37e006b 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -220,6 +220,7 @@ supportedDropStatement | DROP SQL_BLOCK_RULE (IF EXISTS)? identifierSeq #dropSqlBlockRule | DROP USER (IF EXISTS)? userIdentify #dropUser | DROP WORKLOAD GROUP (IF EXISTS)? name=identifierOrText #dropWorkloadGroup + | DROP CATALOG (IF EXISTS)? name=identifier #dropCatalog | DROP FILE name=STRING_LITERAL ((FROM | IN) database=identifier)? properties=propertyClause #dropFile | DROP WORKLOAD POLICY (IF EXISTS)? name=identifierOrText #dropWorkloadPolicy @@ -689,7 +690,6 @@ fromRollup unsupportedDropStatement : DROP (DATABASE | SCHEMA) (IF EXISTS)? name=multipartIdentifier FORCE? #dropDatabase - | DROP CATALOG (IF EXISTS)? name=identifier #dropCatalog | DROP (GLOBAL | SESSION | LOCAL)? FUNCTION (IF EXISTS)? functionIdentifier LEFT_PAREN functionArguments? RIGHT_PAREN #dropFunction | DROP TABLE (IF EXISTS)? name=multipartIdentifier FORCE? #dropTable diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java index 0203aa7020b090..f90a2a32fdc3ec 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/CatalogMgr.java @@ -275,23 +275,24 @@ public void createCatalog(CreateCatalogStmt stmt) throws UserException { /** * Remove the catalog instance by name and write the meta log. 
*/ - public void dropCatalog(DropCatalogStmt stmt) throws UserException { + public void dropCatalog(String catalogName, boolean ifExists) throws UserException { writeLock(); try { - if (stmt.isSetIfExists() && !nameToCatalog.containsKey(stmt.getCatalogName())) { - LOG.warn("Non catalog {} is found.", stmt.getCatalogName()); + if (ifExists && !nameToCatalog.containsKey(catalogName)) { + LOG.warn("Non catalog {} is found.", catalogName); return; } - CatalogIf> catalog = nameToCatalog.get(stmt.getCatalogName()); + CatalogIf> catalog = nameToCatalog.get(catalogName); if (catalog == null) { - throw new DdlException("No catalog found with name: " + stmt.getCatalogName()); + throw new DdlException("No catalog found with name: " + catalogName); } - CatalogLog log = CatalogFactory.createCatalogLog(catalog.getId(), stmt); + CatalogLog log = new CatalogLog(); + log.setCatalogId(catalog.getId()); replayDropCatalog(log); Env.getCurrentEnv().getEditLog().logCatalogLog(OperationType.OP_DROP_CATALOG, log); if (ConnectContext.get() != null) { - ConnectContext.get().removeLastDBOfCatalog(stmt.getCatalogName()); + ConnectContext.get().removeLastDBOfCatalog(catalogName); } Env.getCurrentEnv().getQueryStats().clear(catalog.getId()); } finally { @@ -299,6 +300,13 @@ public void dropCatalog(DropCatalogStmt stmt) throws UserException { } } + /** + * Remove the catalog instance by name and write the meta log. + */ + public void dropCatalog(DropCatalogStmt stmt) throws UserException { + dropCatalog(stmt.getCatalogName(), stmt.isSetIfExists()); + } + /** * Modify the catalog name into a new one and write the meta log. 
*/ diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 68226f156c7f25..3b570fff8e7584 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -120,6 +120,7 @@ import org.apache.doris.nereids.DorisParser.DecimalLiteralContext; import org.apache.doris.nereids.DorisParser.DeleteContext; import org.apache.doris.nereids.DorisParser.DereferenceContext; +import org.apache.doris.nereids.DorisParser.DropCatalogContext; import org.apache.doris.nereids.DorisParser.DropCatalogRecycleBinContext; import org.apache.doris.nereids.DorisParser.DropConstraintContext; import org.apache.doris.nereids.DorisParser.DropEncryptkeyContext; @@ -526,6 +527,7 @@ import org.apache.doris.nereids.trees.plans.commands.CreateWorkloadGroupCommand; import org.apache.doris.nereids.trees.plans.commands.DeleteFromCommand; import org.apache.doris.nereids.trees.plans.commands.DeleteFromUsingCommand; +import org.apache.doris.nereids.trees.plans.commands.DropCatalogCommand; import org.apache.doris.nereids.trees.plans.commands.DropCatalogRecycleBinCommand; import org.apache.doris.nereids.trees.plans.commands.DropCatalogRecycleBinCommand.IdType; import org.apache.doris.nereids.trees.plans.commands.DropConstraintCommand; @@ -4969,6 +4971,13 @@ public LogicalPlan visitDropRole(DropRoleContext ctx) { return new DropRoleCommand(ctx.name.getText(), ctx.EXISTS() != null); } + @Override + public LogicalPlan visitDropCatalog(DropCatalogContext ctx) { + String catalogName = stripQuotes(ctx.name.getText()); + boolean ifExists = ctx.EXISTS() != null; + return new DropCatalogCommand(catalogName, ifExists); + } + @Override public LogicalPlan visitCreateEncryptkey(CreateEncryptkeyContext ctx) { List nameParts = visitMultipartIdentifier(ctx.multipartIdentifier()); diff 
--git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 6395f429db29de..f58a6bf139d2fe 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -149,6 +149,7 @@ public enum PlanType { CREATE_JOB_COMMAND, PAUSE_JOB_COMMAND, CANCEL_JOB_COMMAND, + DROP_CATALOG_COMMAND, DROP_JOB_COMMAND, RESUME_JOB_COMMAND, ALTER_MTMV_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropCatalogCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropCatalogCommand.java new file mode 100644 index 00000000000000..034ecb1053a5f6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropCatalogCommand.java @@ -0,0 +1,77 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Env; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.common.util.Util; +import org.apache.doris.datasource.InternalCatalog; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.StmtExecutor; + +import java.util.Objects; + +/** + * Command for DROP CATALOG. + */ +public class DropCatalogCommand extends DropCommand { + private final String catalogName; + private final boolean ifExists; + + public DropCatalogCommand(String catalogName, boolean ifExists) { + super(PlanType.DROP_CATALOG_COMMAND); + this.catalogName = Objects.requireNonNull(catalogName, "Catalog name cannot be null"); + this.ifExists = ifExists; + } + + @Override + public void doRun(ConnectContext ctx, StmtExecutor executor) throws Exception { + // Validate the catalog name + Util.checkCatalogAllRules(catalogName); + + if (catalogName.equals(InternalCatalog.INTERNAL_CATALOG_NAME)) { + throw new AnalysisException("Internal catalog can't be drop."); + } + + if (!Env.getCurrentEnv().getAccessManager().checkCtlPriv( + ConnectContext.get(), catalogName, PrivPredicate.DROP)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_CATALOG_ACCESS_DENIED, + ConnectContext.get().getQualifiedUser(), catalogName); + } + + Env.getCurrentEnv().getCatalogMgr().dropCatalog(catalogName, ifExists); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitDropCatalogCommand(this, context); + } + + public String getCatalogName() { + return catalogName; + } + + public boolean isIfExists() { + return ifExists; + } +} diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index a9340894c33590..9c2839b3784093 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -56,6 +56,7 @@ import org.apache.doris.nereids.trees.plans.commands.CreateWorkloadGroupCommand; import org.apache.doris.nereids.trees.plans.commands.DeleteFromCommand; import org.apache.doris.nereids.trees.plans.commands.DeleteFromUsingCommand; +import org.apache.doris.nereids.trees.plans.commands.DropCatalogCommand; import org.apache.doris.nereids.trees.plans.commands.DropCatalogRecycleBinCommand; import org.apache.doris.nereids.trees.plans.commands.DropConstraintCommand; import org.apache.doris.nereids.trees.plans.commands.DropEncryptkeyCommand; @@ -345,6 +346,10 @@ default R visitAlterViewCommand(AlterViewCommand alterViewCommand, C context) { return visitCommand(alterViewCommand, context); } + default R visitDropCatalogCommand(DropCatalogCommand dropCatalogCommand, C context) { + return visitCommand(dropCatalogCommand, context); + } + default R visitAlterCatalogCommentCommand(AlterCatalogCommentCommand alterCatalogCommentCommand, C context) { return visitCommand(alterCatalogCommentCommand, context); } diff --git a/regression-test/data/nereids_p0/test_drop_catalog_command.out b/regression-test/data/nereids_p0/test_drop_catalog_command.out new file mode 100644 index 00000000000000..ddb84e0eb356f7 --- /dev/null +++ b/regression-test/data/nereids_p0/test_drop_catalog_command.out @@ -0,0 +1,4 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !cmd -- +test_drop_catalog \nCREATE CATALOG `test_drop_catalog` PROPERTIES (\n"type" = "es",\n"hosts" = "http://127.0.0.1:9200"\n); + diff --git a/regression-test/suites/nereids_p0/test_drop_catalog_command.groovy b/regression-test/suites/nereids_p0/test_drop_catalog_command.groovy new file mode 100644 index 00000000000000..a936e52208ea44 --- /dev/null +++ b/regression-test/suites/nereids_p0/test_drop_catalog_command.groovy @@ -0,0 +1,43 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_drop_catalog_command", "nereids_p0") { + def catalogName = "test_drop_catalog" + def catalogProperties = "\"type\"=\"es\", \"hosts\"=\"http://127.0.0.1:9200\"" + + try { + // Drop catalog if it already exists + checkNereidsExecute("DROP CATALOG IF EXISTS ${catalogName}") + + // Create a new catalog + sql(""" + CREATE CATALOG ${catalogName} + PROPERTIES (${catalogProperties}) + """) + + // Verify the catalog was created + checkNereidsExecute("""SHOW CREATE CATALOG ${catalogName}""") + qt_cmd("""SHOW CREATE CATALOG ${catalogName}""") + + // Drop the catalog + checkNereidsExecute("DROP CATALOG ${catalogName}") + } finally { + // Ensure cleanup + checkNereidsExecute("DROP CATALOG IF EXISTS ${catalogName}") + } +} + From a9de07b9ab2226b578821190e846ca4212fc7879 Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Thu, 19 Dec 2024 15:01:22 +0530 Subject: [PATCH 07/82] [Enhancement] (nereids)implement showDataTypesCommand in nereids (#44299) Issue Number: close #42743 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../nereids/parser/LogicalPlanBuilder.java | 7 ++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../plans/commands/ShowDataTypesCommand.java | 102 ++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 + .../nereids_p0/show/test_show_data_types.out | 31 ++++++ .../show/test_show_data_types.groovy | 29 +++++ 7 files changed, 176 insertions(+), 1 deletion(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowDataTypesCommand.java create mode 100644 regression-test/data/nereids_p0/show/test_show_data_types.out create mode 100644 regression-test/suites/nereids_p0/show/test_show_data_types.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index b2e3eca37e006b..93bf6050970930 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ 
b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -262,6 +262,7 @@ supportedShowStatement | SHOW COLLATION wildWhere? #showCollation | SHOW SQL_BLOCK_RULE (FOR ruleName=identifier)? #showSqlBlockRule | SHOW CREATE VIEW name=multipartIdentifier #showCreateView + | SHOW DATA TYPES #showDataTypes | SHOW CREATE MATERIALIZED VIEW mvName=identifier ON tableName=multipartIdentifier #showCreateMaterializedView | SHOW (WARNINGS | ERRORS) limitClause? #showWarningErrors @@ -330,7 +331,6 @@ unsupportedShowStatement LEFT_PAREN functionArguments? RIGHT_PAREN ((FROM | IN) database=multipartIdentifier)? #showCreateFunction | SHOW (DATABASES | SCHEMAS) (FROM catalog=identifier)? wildWhere? #showDatabases - | SHOW DATA TYPES #showDataTypes | SHOW CATALOGS wildWhere? #showCatalogs | SHOW CATALOG name=identifier #showCatalog | SHOW FULL? (COLUMNS | FIELDS) (FROM | IN) tableName=multipartIdentifier diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 3b570fff8e7584..34f760ff4f524e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -257,6 +257,7 @@ import org.apache.doris.nereids.DorisParser.ShowCreateTableContext; import org.apache.doris.nereids.DorisParser.ShowCreateViewContext; import org.apache.doris.nereids.DorisParser.ShowDataSkewContext; +import org.apache.doris.nereids.DorisParser.ShowDataTypesContext; import org.apache.doris.nereids.DorisParser.ShowDatabaseIdContext; import org.apache.doris.nereids.DorisParser.ShowDeleteContext; import org.apache.doris.nereids.DorisParser.ShowDiagnoseTabletContext; @@ -575,6 +576,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowCreateTableCommand; import org.apache.doris.nereids.trees.plans.commands.ShowCreateViewCommand; import 
org.apache.doris.nereids.trees.plans.commands.ShowDataSkewCommand; +import org.apache.doris.nereids.trees.plans.commands.ShowDataTypesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDatabaseIdCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDeleteCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDiagnoseTabletCommand; @@ -4498,6 +4500,11 @@ public LogicalPlan visitShowLoadProfile(ShowLoadProfileContext ctx) { return new ShowLoadProfileCommand(ctx.loadIdPath.getText()); } + @Override + public LogicalPlan visitShowDataTypes(ShowDataTypesContext ctx) { + return new ShowDataTypesCommand(); + } + @Override public LogicalPlan visitShowGrants(ShowGrantsContext ctx) { boolean all = (ctx.ALL() != null) ? true : false; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index f58a6bf139d2fe..8eeac54a853e0f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -224,6 +224,7 @@ public enum PlanType { SHOW_DYNAMIC_PARTITION_COMMAND, SHOW_ENCRYPT_KEYS_COMMAND, SHOW_EVENTS_COMMAND, + SHOW_DATA_TYPES_COMMAND, SHOW_FRONTENDS_COMMAND, SHOW_GRANTS_COMMAND, SHOW_LAST_INSERT_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowDataTypesCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowDataTypesCommand.java new file mode 100644 index 00000000000000..6ce9b781bd37f1 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowDataTypesCommand.java @@ -0,0 +1,102 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.PrimitiveType; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ShowResultSet; +import org.apache.doris.qe.ShowResultSetMetaData; +import org.apache.doris.qe.StmtExecutor; + +import com.google.common.collect.Lists; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; + +/** + * Represents the command for SHOW DATA TYPES. + */ +public class ShowDataTypesCommand extends ShowCommand { + private static final ShowResultSetMetaData META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("TypeName", ScalarType.createVarchar(20))) + .addColumn(new Column("Size", ScalarType.createVarchar(100))) + .build(); + + public ShowDataTypesCommand() { + super(PlanType.SHOW_DATA_TYPES_COMMAND); + } + + /** + * getTypes(). + */ + public static ArrayList getTypes() { + return PrimitiveType.getSupportedTypes(); + } + + /** + * getTypesAvailableInDdl(). 
+ */ + public static List> getTypesAvailableInDdl() { + ArrayList supportedTypes = getTypes(); + List> rows = Lists.newArrayList(); + for (PrimitiveType type : supportedTypes) { + List row = new ArrayList<>(); + if (type.isAvailableInDdl()) { + row.add(type.toString()); + row.add(Integer.toString(type.getSlotSize())); + rows.add(row); + } + } + return rows; + } + + /** + * sortMetaData(). + */ + public void sortMetaData(List> rows) { + Collections.sort(rows, new Comparator>() { + @Override + public int compare(List row1, List row2) { + return row1.get(0).compareTo(row2.get(0)); + } + }); + } + + @Override + public ShowResultSet doRun(ConnectContext ctx, StmtExecutor executor) throws Exception { + List> rows = getTypesAvailableInDdl(); + sortMetaData(rows); + return new ShowResultSet(getMetaData(), rows); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitShowDataTypesCommand(this, context); + } + + public ShowResultSetMetaData getMetaData() { + return META_DATA; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index 9c2839b3784093..cce1f41e071531 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -102,6 +102,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowCreateTableCommand; import org.apache.doris.nereids.trees.plans.commands.ShowCreateViewCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDataSkewCommand; +import org.apache.doris.nereids.trees.plans.commands.ShowDataTypesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDatabaseIdCommand; import org.apache.doris.nereids.trees.plans.commands.ShowDeleteCommand; import 
org.apache.doris.nereids.trees.plans.commands.ShowDiagnoseTabletCommand; @@ -521,6 +522,10 @@ default R visitCleanAllProfileCommand(CleanAllProfileCommand cleanAllProfileComm return visitCommand(cleanAllProfileCommand, context); } + default R visitShowDataTypesCommand(ShowDataTypesCommand showDataTypesCommand, C context) { + return visitCommand(showDataTypesCommand, context); + } + default R visitShowFrontendsCommand(ShowFrontendsCommand showFrontendsCommand, C context) { return visitCommand(showFrontendsCommand, context); } diff --git a/regression-test/data/nereids_p0/show/test_show_data_types.out b/regression-test/data/nereids_p0/show/test_show_data_types.out new file mode 100644 index 00000000000000..de1d757cbf80cd --- /dev/null +++ b/regression-test/data/nereids_p0/show/test_show_data_types.out @@ -0,0 +1,31 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !cmd -- +AGG_STATE 16 +ARRAY 32 +BIGINT 8 +BITMAP 16 +BOOLEAN 1 +CHAR 16 +DATE 16 +DATETIME 16 +DATETIMEV2 8 +DATEV2 4 +DECIMAL128 16 +DECIMAL32 4 +DECIMAL64 8 +DECIMALV2 16 +DOUBLE 8 +FLOAT 4 +HLL 16 +INT 4 +IPV4 4 +IPV6 16 +JSON 16 +LARGEINT 16 +MAP 24 +QUANTILE_STATE 16 +SMALLINT 2 +STRING 16 +TINYINT 1 +VARCHAR 16 + diff --git a/regression-test/suites/nereids_p0/show/test_show_data_types.groovy b/regression-test/suites/nereids_p0/show/test_show_data_types.groovy new file mode 100644 index 00000000000000..4316fd5545f47f --- /dev/null +++ b/regression-test/suites/nereids_p0/show/test_show_data_types.groovy @@ -0,0 +1,29 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_show_data_types_nereids", "query,datatype") { + try { + // Execute the SHOW DATA TYPES command and verify the output + checkNereidsExecute("SHOW DATA TYPES") + qt_cmd("SHOW DATA TYPES") + } catch (Exception e) { + // Log any exceptions that occur during testing + log.error("Failed to execute SHOW DATA TYPES command", e) + throw e + } +} + From ea6958cb40e225dd3704327964436432696497eb Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Thu, 19 Dec 2024 15:01:40 +0530 Subject: [PATCH 08/82] [Enhancement](nereids)implement showStatusCommand in nereids (#45427) Issue Number: close #42730 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../nereids/parser/LogicalPlanBuilder.java | 16 +++++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../plans/commands/ShowStatusCommand.java | 61 +++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 ++ .../show/test_show_status_command.out | 7 +++ .../show/test_show_status_command.groovy | 31 ++++++++++ 7 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowStatusCommand.java create mode 100644 regression-test/data/nereids_p0/show/test_show_status_command.out create mode 100644 regression-test/suites/nereids_p0/show/test_show_status_command.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 93bf6050970930..97876c231fec69 100644 --- 
a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -274,6 +274,7 @@ supportedShowStatement | SHOW DATABASE databaseId=INTEGER_VALUE #showDatabaseId | SHOW TABLE tableId=INTEGER_VALUE #showTableId | SHOW TRASH (ON backend=STRING_LITERAL)? #showTrash + | SHOW (GLOBAL | SESSION | LOCAL)? STATUS #showStatus | SHOW WHITELIST #showWhitelist | SHOW TABLETS BELONG tabletIds+=INTEGER_VALUE (COMMA tabletIds+=INTEGER_VALUE)* #showTabletsBelong @@ -325,7 +326,6 @@ unsupportedShowStatement | SHOW TABLE STATUS ((FROM | IN) database=multipartIdentifier)? wildWhere? #showTableStatus | SHOW FULL? TABLES ((FROM | IN) database=multipartIdentifier)? wildWhere? #showTables | SHOW FULL? VIEWS ((FROM | IN) database=multipartIdentifier)? wildWhere? #showViews - | SHOW (GLOBAL | SESSION | LOCAL)? STATUS wildWhere? #showStatus | SHOW CREATE MATERIALIZED VIEW name=multipartIdentifier #showMaterializedView | SHOW CREATE (GLOBAL | SESSION | LOCAL)? FUNCTION functionIdentifier LEFT_PAREN functionArguments? 
RIGHT_PAREN diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 34f760ff4f524e..0332123f9ff584 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -281,6 +281,7 @@ import org.apache.doris.nereids.DorisParser.ShowRolesContext; import org.apache.doris.nereids.DorisParser.ShowSmallFilesContext; import org.apache.doris.nereids.DorisParser.ShowSqlBlockRuleContext; +import org.apache.doris.nereids.DorisParser.ShowStatusContext; import org.apache.doris.nereids.DorisParser.ShowStorageEnginesContext; import org.apache.doris.nereids.DorisParser.ShowSyncJobContext; import org.apache.doris.nereids.DorisParser.ShowTableCreationContext; @@ -599,6 +600,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowRolesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSmallFilesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSqlBlockRuleCommand; +import org.apache.doris.nereids.trees.plans.commands.ShowStatusCommand; import org.apache.doris.nereids.trees.plans.commands.ShowStorageEnginesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSyncJobCommand; import org.apache.doris.nereids.trees.plans.commands.ShowTableCreationCommand; @@ -5124,6 +5126,20 @@ public LogicalPlan visitAdminCheckTablets(AdminCheckTabletsContext ctx) { return new AdminCheckTabletsCommand(tabletIdLists, properties); } + @Override + public LogicalPlan visitShowStatus(ShowStatusContext ctx) { + String scope = null; + if (ctx.GLOBAL() != null) { + scope = "GLOBAL"; + } else if (ctx.SESSION() != null) { + scope = "SESSION"; + } else if (ctx.LOCAL() != null) { + scope = "LOCAL"; + } + + return new ShowStatusCommand(scope); + } + @Override public LogicalPlan visitShowDataSkew(ShowDataSkewContext ctx) { 
TableRefInfo tableRefInfo = visitBaseTableRefContext(ctx.baseTableRef()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 8eeac54a853e0f..dfc129f10b0fd6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -239,6 +239,7 @@ public enum PlanType { SHOW_REPOSITORIES_COMMAND, SHOW_ROLE_COMMAND, SHOW_SMALL_FILES_COMMAND, + SHOW_STATUS_COMMAND, SHOW_STORAGE_ENGINES_COMMAND, SHOW_SYNC_JOB_COMMAND, SHOW_TABLE_ID_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowStatusCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowStatusCommand.java new file mode 100644 index 00000000000000..3ae5643e068c9a --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowStatusCommand.java @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ShowResultSet; +import org.apache.doris.qe.ShowResultSetMetaData; +import org.apache.doris.qe.StmtExecutor; + +import com.google.common.collect.Lists; + +import java.util.List; + +/** + * Command for SHOW STATUS. + */ +public class ShowStatusCommand extends ShowCommand { + private static final ShowResultSetMetaData META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("Variable_name", ScalarType.createVarchar(64))) + .addColumn(new Column("Value", ScalarType.createVarchar(64))) + .build(); + + private final String scope; + + public ShowStatusCommand(String scope) { + super(PlanType.SHOW_STATUS_COMMAND); + this.scope = scope; + } + + @Override + public ShowResultSet doRun(ConnectContext ctx, StmtExecutor executor) throws Exception { + List> rows = Lists.newArrayList(); + return new ShowResultSet(META_DATA, rows); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitShowStatusCommand(this, context); + } +} + diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index cce1f41e071531..d3749e94d57d0f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -125,6 +125,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowRolesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSmallFilesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSqlBlockRuleCommand; +import 
org.apache.doris.nereids.trees.plans.commands.ShowStatusCommand; import org.apache.doris.nereids.trees.plans.commands.ShowStorageEnginesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowSyncJobCommand; import org.apache.doris.nereids.trees.plans.commands.ShowTableCreationCommand; @@ -416,6 +417,10 @@ default R visitShowGrantsCommand(ShowGrantsCommand showGrantsCommand, C context) return visitCommand(showGrantsCommand, context); } + default R visitShowStatusCommand(ShowStatusCommand showStatusCommand, C context) { + return visitCommand(showStatusCommand, context); + } + default R visitShowPartitionIdCommand(ShowPartitionIdCommand showPartitionIdCommand, C context) { return visitCommand(showPartitionIdCommand, context); } diff --git a/regression-test/data/nereids_p0/show/test_show_status_command.out b/regression-test/data/nereids_p0/show/test_show_status_command.out new file mode 100644 index 00000000000000..0fbf8d052eda80 --- /dev/null +++ b/regression-test/data/nereids_p0/show/test_show_status_command.out @@ -0,0 +1,7 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !cmd -- + +-- !cmd -- + +-- !cmd -- + diff --git a/regression-test/suites/nereids_p0/show/test_show_status_command.groovy b/regression-test/suites/nereids_p0/show/test_show_status_command.groovy new file mode 100644 index 00000000000000..1b611268057111 --- /dev/null +++ b/regression-test/suites/nereids_p0/show/test_show_status_command.groovy @@ -0,0 +1,31 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_show_status_command", "nereids_p0") { + // Verify SESSION status + checkNereidsExecute("SHOW SESSION STATUS") + qt_cmd("SHOW SESSION STATUS") + + // Verify GLOBAL status + checkNereidsExecute("SHOW GLOBAL STATUS") + qt_cmd("SHOW GLOBAL STATUS") + + // Verify default STATUS (SESSION) + checkNereidsExecute("SHOW STATUS") + qt_cmd("SHOW STATUS") +} + From 549abf4a56e047bbba8dd9c75fa5cc46e67d9ea0 Mon Sep 17 00:00:00 2001 From: Calvin Kirs Date: Thu, 19 Dec 2024 17:37:33 +0800 Subject: [PATCH 09/82] =?UTF-8?q?[Fix](Job)Fix=20redundant=20job=20schedul?= =?UTF-8?q?ing=20by=20preventing=20same=20state=20transitions=20(e.g.,=20R?= =?UTF-8?q?UNNING=20=E2=86=92=20RUNNING)=20(#45495)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What problem does this PR solve? In the current job scheduling logic, invalid state transitions (e.g., RUNNING to RUNNING) are not filtered, which causes redundant scheduling during resume operations. This PR adds a check to ensure that jobs cannot transition to the same state, preventing duplicate scheduling triggers and improving state consistency. 
--- .../java/org/apache/doris/job/manager/JobManager.java | 6 +++++- .../suites/job_p0/test_base_insert_job.groovy | 9 ++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/job/manager/JobManager.java b/fe/fe-core/src/main/java/org/apache/doris/job/manager/JobManager.java index 2a957775e113b8..ac9f15b9d67fd3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/job/manager/JobManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/job/manager/JobManager.java @@ -201,9 +201,13 @@ public void alterJobStatus(String jobName, JobStatus jobStatus) throws JobExcept for (T a : jobMap.values()) { if (a.getJobName().equals(jobName)) { try { + if (jobStatus.equals(a.getJobStatus())) { + throw new JobException("Can't change job status to the same status"); + } alterJobStatus(a.getJobId(), jobStatus); } catch (JobException e) { - throw new JobException("unregister job error, jobName:" + jobName); + throw new JobException("Alter job status error, jobName is %s, errorMsg is %s", + jobName, e.getMessage()); } } } diff --git a/regression-test/suites/job_p0/test_base_insert_job.groovy b/regression-test/suites/job_p0/test_base_insert_job.groovy index 33ae28443b290a..1703b355c95019 100644 --- a/regression-test/suites/job_p0/test_base_insert_job.groovy +++ b/regression-test/suites/job_p0/test_base_insert_job.groovy @@ -190,6 +190,11 @@ suite("test_base_insert_job") { // check job status and succeed task count is 1 pressJob.size() == 1 && '1' == onceJob.get(0).get(0) }) + assertThrows(Exception) { + sql """ + RESUME JOB where jobName='press' + """ + } sql """ DROP JOB IF EXISTS where jobname = 'past_start_time' @@ -299,12 +304,10 @@ suite("test_base_insert_job") { assert e.getMessage().contains("Invalid interval time unit: years") } // assert interval time unit is -1 - try { + assertThrows(Exception) { sql """ CREATE JOB test_error_starts ON SCHEDULE every -1 second comment 'test' DO insert into ${tableName} (timestamp, 
type, user_id) values ('2023-03-18','1','12213'); """ - } catch (Exception e) { - assert e.getMessage().contains("expecting INTEGER_VALUE") } // test keyword as job name From 40c6c61dbbe67b3c11095427b3c5e471dfee0042 Mon Sep 17 00:00:00 2001 From: 924060929 Date: Thu, 19 Dec 2024 17:39:25 +0800 Subject: [PATCH 10/82] [fix](sql cache) fix prepare statement with sql cache throw NullPointerException (#45640) fix prepare statement with sql cache throw NullPointerException: ```shell java.lang.NullPointerException: Cannot read field "originStmt" because the return value of "org.apache.doris.analysis.StatementBase.getOrigStmt()" is null ``` --- .../org/apache/doris/qe/StmtExecutor.java | 3 +- .../cache/prepare_stmt_with_sql_cache.groovy | 57 +++++++++++++++++++ 2 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 regression-test/suites/nereids_p0/cache/prepare_stmt_with_sql_cache.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java index 05df53ed6796cb..5c2566225fe50a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java @@ -1894,7 +1894,8 @@ private void handleQueryStmt() throws Exception { // TODO support arrow flight sql // NOTE: If you want to add another condition about SessionVariable, please consider whether // add to CacheAnalyzer.commonCacheCondition - if (channel != null && !isOutfileQuery && CacheAnalyzer.canUseCache(context.getSessionVariable())) { + if (channel != null && !isOutfileQuery && CacheAnalyzer.canUseCache(context.getSessionVariable()) + && parsedStmt.getOrigStmt() != null && parsedStmt.getOrigStmt().originStmt != null) { if (queryStmt instanceof QueryStmt || queryStmt instanceof LogicalPlanAdapter) { handleCacheStmt(cacheAnalyzer, channel); LOG.info("Query {} finished", DebugUtil.printId(context.queryId)); diff --git 
a/regression-test/suites/nereids_p0/cache/prepare_stmt_with_sql_cache.groovy b/regression-test/suites/nereids_p0/cache/prepare_stmt_with_sql_cache.groovy new file mode 100644 index 00000000000000..7819a6ca09d719 --- /dev/null +++ b/regression-test/suites/nereids_p0/cache/prepare_stmt_with_sql_cache.groovy @@ -0,0 +1,57 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import com.mysql.cj.ServerPreparedQuery +import com.mysql.cj.jdbc.ConnectionImpl +import com.mysql.cj.jdbc.JdbcStatement +import com.mysql.cj.jdbc.ServerPreparedStatement +import com.mysql.cj.jdbc.StatementImpl +import org.apache.doris.regression.util.JdbcUtils + +import java.lang.reflect.Field +import java.sql.PreparedStatement +import java.sql.ResultSet +import java.util.concurrent.CopyOnWriteArrayList + +suite("prepare_stmt_with_sql_cache") { + + multi_sql """ + drop table if exists test_prepare_stmt_with_sql_cache; + create table test_prepare_stmt_with_sql_cache(id int) + distributed by hash(id) + properties('replication_num'='1'); + + insert into test_prepare_stmt_with_sql_cache select * from numbers('number'='100'); + """ + + def db = (sql "select database()")[0][0].toString() + + def url = getServerPrepareJdbcUrl(context.config.jdbcUrl, db) + + connect(context.config.jdbcUser, context.config.jdbcPassword, url) { + sql "set enable_sql_cache=true" + for (def i in 0..<10) { + try (PreparedStatement pstmt = prepareStatement("select * from test_prepare_stmt_with_sql_cache where id=?")) { + pstmt.setInt(1, i) + try (ResultSet rs = pstmt.executeQuery()) { + def result = JdbcUtils.toList(rs).v1 + logger.info("result: {}", result) + } + } + } + } +} From 24328d1cc2401b62a62f4d89c944a03866e4a252 Mon Sep 17 00:00:00 2001 From: morrySnow Date: Thu, 19 Dec 2024 17:48:37 +0800 Subject: [PATCH 11/82] [opt](Nereids) lock table in ascending order of table IDs (#45045) ### What problem does this PR solve? Problem Summary: Doris's table locks are fair read-write locks. If two threads acquire read locks on tables in different orders and simultaneously a third thread attempts to acquire a write lock on one of these tables, a deadlock can form between the two threads trying to acquire read locks. This PR changes the lock acquisition order for queries to follow the order of table IDs, ensuring that the lock acquisition order for tables is consistent among different threads. 
### Release note Execute table locking operations in ascending order of table IDs --- .../java/org/apache/doris/catalog/Column.java | 5 +- .../java/org/apache/doris/catalog/MTMV.java | 8 +- .../org/apache/doris/catalog/OlapTable.java | 30 +-- .../org/apache/doris/catalog/TableIf.java | 134 ++++------ .../doris/common/NereidsSqlCacheManager.java | 32 ++- .../lock/MonitoredReentrantReadWriteLock.java | 13 + .../doris/common/proc/PartitionsProcDir.java | 37 ++- .../doris/common/profile/SummaryProfile.java | 15 +- .../httpv2/rest/StmtExecutionAction.java | 34 +-- .../doris/job/extensions/mtmv/MTMVTask.java | 38 ++- .../org/apache/doris/mtmv/BaseTableInfo.java | 7 + .../java/org/apache/doris/mtmv/MTMVCache.java | 30 ++- .../org/apache/doris/mtmv/MTMVJobManager.java | 11 +- .../apache/doris/mtmv/MTMVPartitionUtil.java | 5 +- .../org/apache/doris/mtmv/MTMVPlanUtil.java | 68 ++--- .../doris/mtmv/MTMVRelationManager.java | 21 +- .../apache/doris/mtmv/MTMVRewriteUtil.java | 4 +- .../apache/doris/nereids/CascadesContext.java | 239 +----------------- .../apache/doris/nereids/NereidsPlanner.java | 126 +++++---- .../apache/doris/nereids/SqlCacheContext.java | 4 + .../doris/nereids/StatementContext.java | 156 ++++++++---- .../UnboundBaseExternalTableSink.java | 6 - .../analyzer/UnboundOneRowRelation.java | 2 +- .../nereids/analyzer/UnboundRelation.java | 6 - .../nereids/analyzer/UnboundResultSink.java | 6 - .../nereids/analyzer/UnboundTVFRelation.java | 6 - .../nereids/analyzer/UnboundTableSink.java | 6 - .../doris/nereids/jobs/executor/Analyzer.java | 35 +-- .../nereids/jobs/executor/TableCollector.java | 71 ++++++ .../doris/nereids/minidump/MinidumpUtils.java | 11 +- .../nereids/parser/LogicalPlanBuilder.java | 3 +- .../apache/doris/nereids/rules/RuleType.java | 9 +- .../nereids/rules/analysis/BindRelation.java | 64 ++--- .../rules/analysis/CollectRelation.java | 228 +++++++++++++++++ .../mv/AsyncMaterializationContext.java | 5 - .../mv/InitMaterializationContextHook.java | 24 
+- .../exploration/mv/MaterializedViewUtils.java | 8 +- .../plans/commands/AddConstraintCommand.java | 31 ++- .../trees/plans/commands/CommandUtils.java | 49 ---- .../plans/commands/DropConstraintCommand.java | 28 +- .../commands/ShowConstraintsCommand.java | 15 +- .../plans/commands/info/CreateMTMVInfo.java | 75 +++--- .../info/MTMVPartitionDefinition.java | 20 +- .../insert/InsertIntoTableCommand.java | 131 ++++++---- .../plans/commands/insert/InsertUtils.java | 11 +- .../trees/plans/visitor/TableCollector.java | 122 --------- .../org/apache/doris/qe/SessionVariable.java | 12 + .../org/apache/doris/qe/StmtExecutor.java | 3 + .../tablefunction/MetadataGenerator.java | 37 ++- .../rules/analysis/BindRelationTest.java | 70 +---- .../nereids/trees/plans/PlanVisitorTest.java | 163 ------------ .../doris/nereids/util/PlanChecker.java | 14 +- .../doris/nereids/util/ReadLockTest.java | 11 +- .../apache/doris/qe/OlapQueryCacheTest.java | 2 +- 54 files changed, 1083 insertions(+), 1218 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/TableCollector.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CommandUtils.java delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/TableCollector.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java index 0ae6a4f8bdb5eb..3ef5f680e94d15 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Column.java @@ -1054,10 +1054,7 @@ public boolean equals(Object obj) { && isKey == other.isKey && isAllowNull == other.isAllowNull && isAutoInc == other.isAutoInc - && getDataType().equals(other.getDataType()) - && getStrLen() == other.getStrLen() - && 
getPrecision() == other.getPrecision() - && getScale() == other.getScale() + && Objects.equals(type, other.type) && Objects.equals(comment, other.comment) && visible == other.visible && Objects.equals(children, other.children) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/MTMV.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/MTMV.java index daf1aac333d653..19058df1eb904b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/MTMV.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/MTMV.java @@ -201,7 +201,7 @@ public void addTaskResult(MTMVTask task, MTMVRelation relation, // to connection issues such as S3, so it is directly set to null if (!isReplay) { // shouldn't do this while holding mvWriteLock - mtmvCache = MTMVCache.from(this, MTMVPlanUtil.createMTMVContext(this), true); + mtmvCache = MTMVCache.from(this, MTMVPlanUtil.createMTMVContext(this), true, true); } } catch (Throwable e) { mtmvCache = null; @@ -323,7 +323,7 @@ public MTMVCache getOrGenerateCache(ConnectContext connectionContext) throws Ana MTMVCache mtmvCache; try { // Should new context with ADMIN user - mtmvCache = MTMVCache.from(this, MTMVPlanUtil.createMTMVContext(this), true); + mtmvCache = MTMVCache.from(this, MTMVPlanUtil.createMTMVContext(this), true, false); } finally { connectionContext.setThreadLocalInfo(); } @@ -362,7 +362,7 @@ public MTMVRefreshSnapshot getRefreshSnapshot() { * * @return mvPartitionName ==> mvPartitionKeyDesc */ - public Map generateMvPartitionDescs() throws AnalysisException { + public Map generateMvPartitionDescs() { Map mtmvItems = getAndCopyPartitionItems(); Map result = Maps.newHashMap(); for (Entry entry : mtmvItems.entrySet()) { @@ -392,7 +392,7 @@ public Pair>, Map> calculateDoublyPartit Map baseToMv = Maps.newHashMap(); Map> relatedPartitionDescs = MTMVPartitionUtil .generateRelatedPartitionDescs(mvPartitionInfo, mvProperties); - Map mvPartitionItems = getAndCopyPartitionItemsWithoutLock(); + Map mvPartitionItems = 
getAndCopyPartitionItems(); for (Entry entry : mvPartitionItems.entrySet()) { Set basePartitionNames = relatedPartitionDescs.getOrDefault(entry.getValue().toPartitionKeyDesc(), Sets.newHashSet()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java index 9f1f455ab35495..477f76301120d2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/OlapTable.java @@ -114,7 +114,6 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; /** @@ -3325,33 +3324,26 @@ public PartitionType getPartitionType() { } @Override - public Map getAndCopyPartitionItems(Optional snapshot) - throws AnalysisException { + public Map getAndCopyPartitionItems(Optional snapshot) { return getAndCopyPartitionItems(); } - public Map getAndCopyPartitionItems() throws AnalysisException { - if (!tryReadLock(1, TimeUnit.MINUTES)) { - throw new AnalysisException("get table read lock timeout, database=" + getDBName() + ",table=" + getName()); - } + public Map getAndCopyPartitionItems() { + readLock(); try { - return getAndCopyPartitionItemsWithoutLock(); + Map res = Maps.newHashMap(); + for (Entry entry : getPartitionInfo().getIdToItem(false).entrySet()) { + Partition partition = idToPartition.get(entry.getKey()); + if (partition != null) { + res.put(partition.getName(), entry.getValue()); + } + } + return res; } finally { readUnlock(); } } - public Map getAndCopyPartitionItemsWithoutLock() throws AnalysisException { - Map res = Maps.newHashMap(); - for (Entry entry : getPartitionInfo().getIdToItem(false).entrySet()) { - Partition partition = idToPartition.get(entry.getKey()); - if (partition != null) { - res.put(partition.getName(), entry.getValue()); - } - } - return res; - } - @Override public List 
getPartitionColumns(Optional snapshot) { return getPartitionColumns(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java index 4761ac9d86db2a..a93c0818d5c91f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/TableIf.java @@ -213,7 +213,6 @@ default Map getConstraintsMapUnsafe() { } default Set getForeignKeyConstraints() { - readLock(); try { return getConstraintsMapUnsafe().values().stream() .filter(ForeignKeyConstraint.class::isInstance) @@ -221,24 +220,18 @@ default Set getForeignKeyConstraints() { .collect(ImmutableSet.toImmutableSet()); } catch (Exception ignored) { return ImmutableSet.of(); - } finally { - readUnlock(); } } default Map getConstraintsMap() { - readLock(); try { return ImmutableMap.copyOf(getConstraintsMapUnsafe()); } catch (Exception ignored) { return ImmutableMap.of(); - } finally { - readUnlock(); } } default Set getPrimaryKeyConstraints() { - readLock(); try { return getConstraintsMapUnsafe().values().stream() .filter(PrimaryKeyConstraint.class::isInstance) @@ -246,13 +239,10 @@ default Set getPrimaryKeyConstraints() { .collect(ImmutableSet.toImmutableSet()); } catch (Exception ignored) { return ImmutableSet.of(); - } finally { - readUnlock(); } } default Set getUniqueConstraints() { - readLock(); try { return getConstraintsMapUnsafe().values().stream() .filter(UniqueConstraint.class::isInstance) @@ -260,8 +250,6 @@ default Set getUniqueConstraints() { .collect(ImmutableSet.toImmutableSet()); } catch (Exception ignored) { return ImmutableSet.of(); - } finally { - readUnlock(); } } @@ -280,34 +268,24 @@ default void checkConstraintNotExistenceUnsafe(String name, Constraint primaryKe } default void addUniqueConstraint(String name, ImmutableList columns, boolean replay) { - writeLock(); - try { - Map constraintMap = getConstraintsMapUnsafe(); - UniqueConstraint 
uniqueConstraint = new UniqueConstraint(name, ImmutableSet.copyOf(columns)); - checkConstraintNotExistenceUnsafe(name, uniqueConstraint, constraintMap); - constraintMap.put(name, uniqueConstraint); - if (!replay) { - Env.getCurrentEnv().getEditLog().logAddConstraint( - new AlterConstraintLog(uniqueConstraint, this)); - } - } finally { - writeUnlock(); + Map constraintMap = getConstraintsMapUnsafe(); + UniqueConstraint uniqueConstraint = new UniqueConstraint(name, ImmutableSet.copyOf(columns)); + checkConstraintNotExistenceUnsafe(name, uniqueConstraint, constraintMap); + constraintMap.put(name, uniqueConstraint); + if (!replay) { + Env.getCurrentEnv().getEditLog().logAddConstraint( + new AlterConstraintLog(uniqueConstraint, this)); } } default void addPrimaryKeyConstraint(String name, ImmutableList columns, boolean replay) { - writeLock(); - try { - Map constraintMap = getConstraintsMapUnsafe(); - PrimaryKeyConstraint primaryKeyConstraint = new PrimaryKeyConstraint(name, ImmutableSet.copyOf(columns)); - checkConstraintNotExistenceUnsafe(name, primaryKeyConstraint, constraintMap); - constraintMap.put(name, primaryKeyConstraint); - if (!replay) { - Env.getCurrentEnv().getEditLog().logAddConstraint( - new AlterConstraintLog(primaryKeyConstraint, this)); - } - } finally { - writeUnlock(); + Map constraintMap = getConstraintsMapUnsafe(); + PrimaryKeyConstraint primaryKeyConstraint = new PrimaryKeyConstraint(name, ImmutableSet.copyOf(columns)); + checkConstraintNotExistenceUnsafe(name, primaryKeyConstraint, constraintMap); + constraintMap.put(name, primaryKeyConstraint); + if (!replay) { + Env.getCurrentEnv().getEditLog().logAddConstraint( + new AlterConstraintLog(primaryKeyConstraint, this)); } } @@ -326,26 +304,19 @@ default PrimaryKeyConstraint tryGetPrimaryKeyForForeignKeyUnsafe( default void addForeignConstraint(String name, ImmutableList columns, TableIf referencedTable, ImmutableList referencedColumns, boolean replay) { - writeLock(); - referencedTable.writeLock(); 
- try { - Map constraintMap = getConstraintsMapUnsafe(); - ForeignKeyConstraint foreignKeyConstraint = - new ForeignKeyConstraint(name, columns, referencedTable, referencedColumns); - checkConstraintNotExistenceUnsafe(name, foreignKeyConstraint, constraintMap); - PrimaryKeyConstraint requirePrimaryKeyName = new PrimaryKeyConstraint(name, - foreignKeyConstraint.getReferencedColumnNames()); - PrimaryKeyConstraint primaryKeyConstraint = - tryGetPrimaryKeyForForeignKeyUnsafe(requirePrimaryKeyName, referencedTable); - primaryKeyConstraint.addForeignTable(this); - constraintMap.put(name, foreignKeyConstraint); - if (!replay) { - Env.getCurrentEnv().getEditLog().logAddConstraint( - new AlterConstraintLog(foreignKeyConstraint, this)); - } - } finally { - referencedTable.writeUnlock(); - writeUnlock(); + Map constraintMap = getConstraintsMapUnsafe(); + ForeignKeyConstraint foreignKeyConstraint = + new ForeignKeyConstraint(name, columns, referencedTable, referencedColumns); + checkConstraintNotExistenceUnsafe(name, foreignKeyConstraint, constraintMap); + PrimaryKeyConstraint requirePrimaryKeyName = new PrimaryKeyConstraint(name, + foreignKeyConstraint.getReferencedColumnNames()); + PrimaryKeyConstraint primaryKeyConstraint = + tryGetPrimaryKeyForForeignKeyUnsafe(requirePrimaryKeyName, referencedTable); + primaryKeyConstraint.addForeignTable(this); + constraintMap.put(name, foreignKeyConstraint); + if (!replay) { + Env.getCurrentEnv().getEditLog().logAddConstraint( + new AlterConstraintLog(foreignKeyConstraint, this)); } } @@ -381,40 +352,31 @@ default void replayDropConstraint(String name) { } default void dropConstraint(String name, boolean replay) { - writeLock(); - try { - Map constraintMap = getConstraintsMapUnsafe(); - if (!constraintMap.containsKey(name)) { - throw new AnalysisException( - String.format("Unknown constraint %s on table %s.", name, this.getName())); - } - Constraint constraint = constraintMap.get(name); - constraintMap.remove(name); - if (constraint 
instanceof PrimaryKeyConstraint) { - ((PrimaryKeyConstraint) constraint).getForeignTables() - .forEach(t -> t.dropFKReferringPK(this, (PrimaryKeyConstraint) constraint)); - } - if (!replay) { - Env.getCurrentEnv().getEditLog().logDropConstraint(new AlterConstraintLog(constraint, this)); - } - } finally { - writeUnlock(); + Map constraintMap = getConstraintsMapUnsafe(); + if (!constraintMap.containsKey(name)) { + throw new AnalysisException( + String.format("Unknown constraint %s on table %s.", name, this.getName())); + } + Constraint constraint = constraintMap.get(name); + constraintMap.remove(name); + if (constraint instanceof PrimaryKeyConstraint) { + ((PrimaryKeyConstraint) constraint).getForeignTables() + .forEach(t -> t.dropFKReferringPK(this, (PrimaryKeyConstraint) constraint)); + } + if (!replay) { + Env.getCurrentEnv().getEditLog().logDropConstraint(new AlterConstraintLog(constraint, this)); } } default void dropFKReferringPK(TableIf table, PrimaryKeyConstraint constraint) { - writeLock(); - try { - Map constraintMap = getConstraintsMapUnsafe(); - Set fkName = constraintMap.entrySet().stream() - .filter(e -> e.getValue() instanceof ForeignKeyConstraint - && ((ForeignKeyConstraint) e.getValue()).isReferringPK(table, constraint)) - .map(Entry::getKey) - .collect(Collectors.toSet()); - fkName.forEach(constraintMap::remove); - } finally { - writeUnlock(); - } + Map constraintMap = getConstraintsMapUnsafe(); + Set fkName = constraintMap.entrySet().stream() + .filter(e -> e.getValue() instanceof ForeignKeyConstraint + && ((ForeignKeyConstraint) e.getValue()).isReferringPK(table, constraint)) + .map(Entry::getKey) + .collect(Collectors.toSet()); + fkName.forEach(constraintMap::remove); + } /** diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/NereidsSqlCacheManager.java b/fe/fe-core/src/main/java/org/apache/doris/common/NereidsSqlCacheManager.java index cd32b52034a5d4..86a2b875a93d68 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/common/NereidsSqlCacheManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/NereidsSqlCacheManager.java @@ -225,6 +225,10 @@ private Optional tryParseSqlWithoutCheckVariable( SqlCacheContext sqlCacheContext, UserIdentity currentUserIdentity) { Env env = connectContext.getEnv(); + if (!tryLockTables(connectContext, env, sqlCacheContext)) { + return invalidateCache(key); + } + // check table and view and their columns authority if (privilegeChanged(connectContext, env, sqlCacheContext)) { return invalidateCache(key); @@ -378,16 +382,38 @@ private boolean dataMaskPoliciesChanged( return false; } - private boolean privilegeChanged(ConnectContext connectContext, Env env, SqlCacheContext sqlCacheContext) { + /** + * Execute table locking operations in ascending order of table IDs. + * + * @return true if obtain all tables lock. + */ + private boolean tryLockTables(ConnectContext connectContext, Env env, SqlCacheContext sqlCacheContext) { StatementContext currentStatementContext = connectContext.getStatementContext(); + for (FullTableName fullTableName : sqlCacheContext.getUsedTables()) { + TableIf tableIf = findTableIf(env, fullTableName); + if (tableIf == null) { + return false; + } + currentStatementContext.getTables().put(fullTableName.toList(), tableIf); + } + for (FullTableName fullTableName : sqlCacheContext.getUsedViews().keySet()) { + TableIf tableIf = findTableIf(env, fullTableName); + if (tableIf == null) { + return false; + } + currentStatementContext.getTables().put(fullTableName.toList(), tableIf); + } + currentStatementContext.lock(); + return true; + } + + private boolean privilegeChanged(ConnectContext connectContext, Env env, SqlCacheContext sqlCacheContext) { for (Entry> kv : sqlCacheContext.getCheckPrivilegeTablesOrViews().entrySet()) { Set usedColumns = kv.getValue(); TableIf tableIf = findTableIf(env, kv.getKey()); if (tableIf == null) { return true; } - // release when close statementContext 
- currentStatementContext.addTableReadLock(tableIf); try { UserAuthentication.checkPermission(tableIf, connectContext, usedColumns); } catch (Throwable t) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/lock/MonitoredReentrantReadWriteLock.java b/fe/fe-core/src/main/java/org/apache/doris/common/lock/MonitoredReentrantReadWriteLock.java index 7a6f0db5938b23..de825fbdb3ac23 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/lock/MonitoredReentrantReadWriteLock.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/lock/MonitoredReentrantReadWriteLock.java @@ -17,6 +17,12 @@ package org.apache.doris.common.lock; +import org.apache.doris.common.util.DebugUtil; +import org.apache.doris.qe.ConnectContext; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import java.util.concurrent.locks.ReentrantReadWriteLock; /** @@ -24,6 +30,8 @@ * monitoring capabilities for read and write locks. */ public class MonitoredReentrantReadWriteLock extends ReentrantReadWriteLock { + + private static final Logger LOG = LogManager.getLogger(MonitoredReentrantReadWriteLock.class); // Monitored read and write lock instances private final ReadLock readLock = new ReadLock(this); private final WriteLock writeLock = new WriteLock(this); @@ -97,6 +105,11 @@ protected WriteLock(ReentrantReadWriteLock lock) { public void lock() { super.lock(); monitor.afterLock(); + if (isFair() && getReadHoldCount() > 0) { + LOG.warn(" read lock count is {}, write lock count is {}, stack is {}, query id is {}", + getReadHoldCount(), getWriteHoldCount(), Thread.currentThread().getStackTrace(), + ConnectContext.get() == null ? 
"" : DebugUtil.printId(ConnectContext.get().queryId())); + } } /** diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java index 3fd945c013cd5d..3c44874cb7deff 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java @@ -35,6 +35,7 @@ import org.apache.doris.catalog.Partition; import org.apache.doris.catalog.PartitionInfo; import org.apache.doris.catalog.PartitionType; +import org.apache.doris.catalog.TableIf; import org.apache.doris.catalog.Type; import org.apache.doris.common.AnalysisException; import org.apache.doris.common.ErrorCode; @@ -44,9 +45,12 @@ import org.apache.doris.common.Pair; import org.apache.doris.common.util.DebugUtil; import org.apache.doris.common.util.ListComparator; +import org.apache.doris.common.util.MetaLockUtils; import org.apache.doris.common.util.OrderByPair; import org.apache.doris.common.util.TimeUtils; +import org.apache.doris.mtmv.BaseTableInfo; import org.apache.doris.mtmv.MTMVPartitionUtil; +import org.apache.doris.mtmv.MTMVUtil; import org.apache.doris.thrift.TCell; import org.apache.doris.thrift.TRow; @@ -59,6 +63,7 @@ import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -250,22 +255,38 @@ private List, TRow>> getPartitionInfosInrernal() throws An List, TRow>> partitionInfos = new ArrayList, TRow>>(); Map> partitionsUnSyncTables = null; String mtmvPartitionSyncErrorMsg = null; + + List needLocked = Lists.newArrayList(); + needLocked.add(olapTable); if (olapTable instanceof MTMV) { - try { - partitionsUnSyncTables = MTMVPartitionUtil - .getPartitionsUnSyncTables((MTMV) olapTable); - } catch (AnalysisException e) { - mtmvPartitionSyncErrorMsg = e.getMessage(); + MTMV mtmv = (MTMV) 
olapTable; + for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { + try { + TableIf baseTable = MTMVUtil.getTable(baseTableInfo); + needLocked.add(baseTable); + } catch (Exception e) { + // do nothing, ignore not existed table + } } + needLocked.sort(Comparator.comparing(TableIf::getId)); } - olapTable.readLock(); + MetaLockUtils.readLockTables(needLocked); try { + if (olapTable instanceof MTMV) { + try { + partitionsUnSyncTables = MTMVPartitionUtil + .getPartitionsUnSyncTables((MTMV) olapTable); + } catch (AnalysisException e) { + mtmvPartitionSyncErrorMsg = e.getMessage(); + } + } List partitionIds; PartitionInfo tblPartitionInfo = olapTable.getPartitionInfo(); // for range partitions, we return partitions in ascending range order by default. // this is to be consistent with the behaviour before 0.12 - if (tblPartitionInfo.getType() == PartitionType.RANGE || tblPartitionInfo.getType() == PartitionType.LIST) { + if (tblPartitionInfo.getType() == PartitionType.RANGE + || tblPartitionInfo.getType() == PartitionType.LIST) { partitionIds = tblPartitionInfo.getPartitionItemEntryList(isTempPartition, true).stream() .map(Map.Entry::getKey).collect(Collectors.toList()); } else { @@ -402,7 +423,7 @@ private List, TRow>> getPartitionInfosInrernal() throws An partitionInfos.add(Pair.of(partitionInfo, trow)); } } finally { - olapTable.readUnlock(); + MetaLockUtils.readUnlockTables(needLocked); } return partitionInfos; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java index ecc4c908809161..6a92e043b6eb20 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java @@ -97,6 +97,7 @@ public class SummaryProfile { public static final String GET_TABLE_VERSION_COUNT = "Get Table Version Count"; public static final String 
PARSE_SQL_TIME = "Parse SQL Time"; + public static final String NEREIDS_LOCK_TABLE_TIME = "Nereids Lock Table Time"; public static final String NEREIDS_ANALYSIS_TIME = "Nereids Analysis Time"; public static final String NEREIDS_REWRITE_TIME = "Nereids Rewrite Time"; public static final String NEREIDS_OPTIMIZE_TIME = "Nereids Optimize Time"; @@ -136,6 +137,7 @@ public class SummaryProfile { // The display order of execution summary items. public static final ImmutableList EXECUTION_SUMMARY_KEYS = ImmutableList.of( PARSE_SQL_TIME, + NEREIDS_LOCK_TABLE_TIME, NEREIDS_ANALYSIS_TIME, NEREIDS_REWRITE_TIME, NEREIDS_OPTIMIZE_TIME, @@ -224,6 +226,8 @@ public class SummaryProfile { private long parseSqlStartTime = -1; @SerializedName(value = "parseSqlFinishTime") private long parseSqlFinishTime = -1; + @SerializedName(value = "nereidsLockTableFinishTime") + private long nereidsLockTableFinishTime = -1; @SerializedName(value = "nereidsAnalysisFinishTime") private long nereidsAnalysisFinishTime = -1; @SerializedName(value = "nereidsRewriteFinishTime") @@ -410,6 +414,7 @@ private void updateSummaryProfile(Map infos) { private void updateExecutionSummaryProfile() { executionSummaryProfile.addInfoString(PARSE_SQL_TIME, getPrettyParseSqlTime()); + executionSummaryProfile.addInfoString(NEREIDS_LOCK_TABLE_TIME, getPrettyNereidsLockTableTime()); executionSummaryProfile.addInfoString(NEREIDS_ANALYSIS_TIME, getPrettyNereidsAnalysisTime()); executionSummaryProfile.addInfoString(NEREIDS_REWRITE_TIME, getPrettyNereidsRewriteTime()); executionSummaryProfile.addInfoString(NEREIDS_OPTIMIZE_TIME, getPrettyNereidsOptimizeTime()); @@ -506,6 +511,10 @@ public void setParseSqlFinishTime(long parseSqlFinishTime) { this.parseSqlFinishTime = parseSqlFinishTime; } + public void setNereidsLockTableFinishTime() { + this.nereidsLockTableFinishTime = TimeUtils.getStartTimeMs(); + } + public void setNereidsAnalysisTime() { this.nereidsAnalysisFinishTime = TimeUtils.getStartTimeMs(); } @@ -766,8 +775,12 @@ 
public String getPrettyParseSqlTime() { return getPrettyTime(parseSqlFinishTime, parseSqlStartTime, TUnit.TIME_MS); } + public String getPrettyNereidsLockTableTime() { + return getPrettyTime(nereidsLockTableFinishTime, parseSqlStartTime, TUnit.TIME_MS); + } + public String getPrettyNereidsAnalysisTime() { - return getPrettyTime(nereidsAnalysisFinishTime, queryBeginTime, TUnit.TIME_MS); + return getPrettyTime(nereidsAnalysisFinishTime, nereidsLockTableFinishTime, TUnit.TIME_MS); } public String getPrettyNereidsRewriteTime() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/StmtExecutionAction.java b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/StmtExecutionAction.java index a37d3a11f84c9c..524c228467a194 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/StmtExecutionAction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/httpv2/rest/StmtExecutionAction.java @@ -186,22 +186,26 @@ private ResponseEntity executeQuery(ActionAuthorizationInfo authInfo, boolean is @NotNull private String getSchema(String sql) { LogicalPlan unboundMvPlan = new NereidsParser().parseSingle(sql); - StatementContext statementContext = new StatementContext(ConnectContext.get(), - new OriginStatement(sql, 0)); - NereidsPlanner planner = new NereidsPlanner(statementContext); - if (statementContext.getConnectContext().getStatementContext() == null) { - statementContext.getConnectContext().setStatementContext(statementContext); + try (StatementContext statementContext = new StatementContext(ConnectContext.get(), + new OriginStatement(sql, 0))) { + StatementContext originalContext = ConnectContext.get().getStatementContext(); + try { + ConnectContext.get().setStatementContext(statementContext); + NereidsPlanner planner = new NereidsPlanner(statementContext); + planner.planWithLock(unboundMvPlan, PhysicalProperties.ANY, ExplainCommand.ExplainLevel.ANALYZED_PLAN); + LogicalPlan logicalPlan = (LogicalPlan) planner.getCascadesContext().getRewritePlan(); + 
+ List createStmts = PlanUtils.getLogicalScanFromRootPlan(logicalPlan).stream().map(plan -> { + TableIf tbl = plan.getTable(); + List createTableStmts = Lists.newArrayList(); + Env.getDdlStmt(tbl, createTableStmts, null, null, false, true, -1L); + return createTableStmts.get(0); + }).collect(Collectors.toList()); + return Joiner.on("\n\n").join(createStmts); + } finally { + ConnectContext.get().setStatementContext(originalContext); + } } - planner.planWithLock(unboundMvPlan, PhysicalProperties.ANY, ExplainCommand.ExplainLevel.ANALYZED_PLAN); - LogicalPlan logicalPlan = (LogicalPlan) planner.getCascadesContext().getRewritePlan(); - - List createStmts = PlanUtils.getLogicalScanFromRootPlan(logicalPlan).stream().map(plan -> { - TableIf tbl = plan.getTable(); - List createTableStmts = Lists.newArrayList(); - Env.getDdlStmt(tbl, createTableStmts, null, null, false, true, -1L); - return createTableStmts.get(0); - }).collect(Collectors.toList()); - return Joiner.on("\n\n").join(createStmts); } private static class StmtRequestBody { diff --git a/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java b/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java index c1002faf4078b3..31e6c8353e24b3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java +++ b/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java @@ -28,6 +28,7 @@ import org.apache.doris.common.Status; import org.apache.doris.common.UserException; import org.apache.doris.common.util.DebugUtil; +import org.apache.doris.common.util.MetaLockUtils; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.datasource.mvcc.MvccSnapshot; import org.apache.doris.datasource.mvcc.MvccTable; @@ -72,6 +73,7 @@ import java.math.BigDecimal; import java.math.RoundingMode; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -180,19 +182,31 @@ public void run() throws 
JobException { } // Every time a task is run, the relation is regenerated because baseTables and baseViews may change, // such as deleting a table and creating a view with the same name - this.relation = MTMVPlanUtil.generateMTMVRelation(mtmv, ctx); + Set tablesInPlan = MTMVPlanUtil.getBaseTableFromQuery(mtmv.getQuerySql(), ctx); + this.relation = MTMVPlanUtil.generateMTMVRelation(tablesInPlan, ctx); beforeMTMVRefresh(); - if (mtmv.getMvPartitionInfo().getPartitionType() != MTMVPartitionType.SELF_MANAGE) { - MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); - if (!relatedTable.isValidRelatedTable()) { - throw new JobException("MTMV " + mtmv.getName() + "'s related table " + relatedTable.getName() - + " is not a valid related table anymore, stop refreshing." - + " e.g. Table has multiple partition columns or including not supported transform functions."); + List tableIfs = Lists.newArrayList(tablesInPlan); + tableIfs.sort(Comparator.comparing(TableIf::getId)); + + MTMVRefreshContext context; + // lock table order by id to avoid deadlock + MetaLockUtils.readLockTables(tableIfs); + try { + if (mtmv.getMvPartitionInfo().getPartitionType() != MTMVPartitionType.SELF_MANAGE) { + MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); + if (!relatedTable.isValidRelatedTable()) { + throw new JobException("MTMV " + mtmv.getName() + "'s related table " + relatedTable.getName() + + " is not a valid related table anymore, stop refreshing." + + " e.g. 
Table has multiple partition columns" + + " or including not supported transform functions."); + } + MTMVPartitionUtil.alignMvPartition(mtmv); } - MTMVPartitionUtil.alignMvPartition(mtmv); + context = MTMVRefreshContext.buildContext(mtmv); + this.needRefreshPartitions = calculateNeedRefreshPartitions(context); + } finally { + MetaLockUtils.readUnlockTables(tableIfs); } - MTMVRefreshContext context = MTMVRefreshContext.buildContext(mtmv); - this.needRefreshPartitions = calculateNeedRefreshPartitions(context); this.refreshMode = generateRefreshMode(needRefreshPartitions); if (refreshMode == MTMVTaskRefreshMode.NOT_REFRESH) { return; @@ -207,7 +221,7 @@ public void run() throws JobException { int start = i * refreshPartitionNum; int end = start + refreshPartitionNum; Set execPartitionNames = Sets.newHashSet(needRefreshPartitions - .subList(start, end > needRefreshPartitions.size() ? needRefreshPartitions.size() : end)); + .subList(start, Math.min(end, needRefreshPartitions.size()))); // need get names before exec Map execPartitionSnapshots = MTMVPartitionUtil .generatePartitionSnapshots(context, relation.getBaseTablesOneLevel(), execPartitionNames); @@ -217,7 +231,7 @@ public void run() throws JobException { } } catch (Throwable e) { if (getStatus() == TaskStatus.RUNNING) { - LOG.warn("run task failed: ", e.getMessage()); + LOG.warn("run task failed: {}", e.getMessage()); throw new JobException(e.getMessage(), e); } else { // if status is not `RUNNING`,maybe the task was canceled, therefore, it is a normal situation diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/BaseTableInfo.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/BaseTableInfo.java index fcf18d73a269bb..076a4f4e8bb050 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/BaseTableInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/BaseTableInfo.java @@ -26,11 +26,14 @@ import org.apache.doris.datasource.InternalCatalog; import com.google.common.base.Objects; +import 
com.google.common.collect.Lists; import com.google.gson.annotations.SerializedName; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.List; + public class BaseTableInfo { private static final Logger LOG = LogManager.getLogger(BaseTableInfo.class); @@ -167,4 +170,8 @@ public void compatible(CatalogMgr catalogMgr) { LOG.warn("MTMV compatible failed, ctlId: {}, dbId: {}, tableId: {}", ctlId, dbId, tableId, e); } } + + public List toList() { + return Lists.newArrayList(getCtlName(), getDbName(), getTableName()); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVCache.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVCache.java index d3d7f1ad6ebbf5..b185000c14897e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVCache.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVCache.java @@ -87,23 +87,31 @@ public StructInfo getStructInfo() { return structInfo; } - public static MTMVCache from(MTMV mtmv, ConnectContext connectContext, boolean needCost) { + public static MTMVCache from(MTMV mtmv, ConnectContext connectContext, boolean needCost, boolean needLock) { StatementContext mvSqlStatementContext = new StatementContext(connectContext, new OriginStatement(mtmv.getQuerySql(), 0)); + if (needLock) { + mvSqlStatementContext.setNeedLockTables(false); + } if (mvSqlStatementContext.getConnectContext().getStatementContext() == null) { mvSqlStatementContext.getConnectContext().setStatementContext(mvSqlStatementContext); } LogicalPlan unboundMvPlan = new NereidsParser().parseSingle(mtmv.getQuerySql()); NereidsPlanner planner = new NereidsPlanner(mvSqlStatementContext); - - // Can not convert to table sink, because use the same column from different table when self join - // the out slot is wrong - if (needCost) { - // Only in mv rewrite, we need plan with eliminated cost which is used for mv chosen - planner.planWithLock(unboundMvPlan, 
PhysicalProperties.ANY, ExplainLevel.ALL_PLAN); - } else { - // No need cost for performance - planner.planWithLock(unboundMvPlan, PhysicalProperties.ANY, ExplainLevel.REWRITTEN_PLAN); + boolean originalRewriteFlag = connectContext.getSessionVariable().enableMaterializedViewRewrite; + connectContext.getSessionVariable().enableMaterializedViewRewrite = false; + try { + // Can not convert to table sink, because use the same column from different table when self join + // the out slot is wrong + if (needCost) { + // Only in mv rewrite, we need plan with eliminated cost which is used for mv chosen + planner.planWithLock(unboundMvPlan, PhysicalProperties.ANY, ExplainLevel.ALL_PLAN); + } else { + // No need cost for performance + planner.planWithLock(unboundMvPlan, PhysicalProperties.ANY, ExplainLevel.REWRITTEN_PLAN); + } + } finally { + connectContext.getSessionVariable().enableMaterializedViewRewrite = originalRewriteFlag; } Plan originPlan = planner.getCascadesContext().getRewritePlan(); // Eliminate result sink because sink operator is useless in query rewrite by materialized view @@ -128,6 +136,6 @@ public Plan visitLogicalResultSink(LogicalResultSink logicalResu new BitSet()); return new MTMVCache(mvPlan, originPlan, planner.getAnalyzedPlan(), needCost ? 
planner.getCascadesContext().getMemo().getRoot().getStatistics() : null, - structInfoOptional.orElseGet(() -> null)); + structInfoOptional.orElse(null)); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVJobManager.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVJobManager.java index 2c03ad16176fea..a9dee132f64b12 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVJobManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVJobManager.java @@ -104,17 +104,18 @@ private void setManualJobConfig(JobExecutionConfiguration jobExecutionConfigurat private void setScheduleJobConfig(JobExecutionConfiguration jobExecutionConfiguration, MTMV mtmv) { jobExecutionConfiguration.setExecuteType(JobExecuteType.RECURRING); + MTMVRefreshInfo refreshMTMVInfo = mtmv.getRefreshInfo(); TimerDefinition timerDefinition = new TimerDefinition(); timerDefinition - .setInterval(mtmv.getRefreshInfo().getRefreshTriggerInfo().getIntervalTrigger().getInterval()); + .setInterval(refreshMTMVInfo.getRefreshTriggerInfo().getIntervalTrigger().getInterval()); timerDefinition - .setIntervalUnit(mtmv.getRefreshInfo().getRefreshTriggerInfo().getIntervalTrigger().getTimeUnit()); + .setIntervalUnit(refreshMTMVInfo.getRefreshTriggerInfo().getIntervalTrigger().getTimeUnit()); if (!StringUtils - .isEmpty(mtmv.getRefreshInfo().getRefreshTriggerInfo().getIntervalTrigger().getStartTime())) { + .isEmpty(refreshMTMVInfo.getRefreshTriggerInfo().getIntervalTrigger().getStartTime())) { timerDefinition.setStartTimeMs(TimeUtils.timeStringToLong( - mtmv.getRefreshInfo().getRefreshTriggerInfo().getIntervalTrigger().getStartTime())); + refreshMTMVInfo.getRefreshTriggerInfo().getIntervalTrigger().getStartTime())); } - if (mtmv.getRefreshInfo().getBuildMode().equals(BuildMode.IMMEDIATE)) { + if (refreshMTMVInfo.getBuildMode().equals(BuildMode.IMMEDIATE)) { jobExecutionConfiguration.setImmediate(true); } jobExecutionConfiguration.setTimerDefinition(timerDefinition); diff 
--git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionUtil.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionUtil.java index 9597378c488cfc..8f715b1b0fb3a0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionUtil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPartitionUtil.java @@ -536,11 +536,12 @@ private static Map getPartitionVersions(MTMV mtmv) throws Analysis private static Map getTableVersions(MTMV mtmv) { Map res = Maps.newHashMap(); - if (mtmv.getRelation() == null || mtmv.getRelation().getBaseTablesOneLevel() == null) { + MTMVRelation relation = mtmv.getRelation(); + if (relation == null || relation.getBaseTablesOneLevel() == null) { return res; } List olapTables = Lists.newArrayList(); - for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTablesOneLevel()) { + for (BaseTableInfo baseTableInfo : relation.getBaseTablesOneLevel()) { TableIf table = null; try { table = MTMVUtil.getTable(baseTableInfo); diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPlanUtil.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPlanUtil.java index 35c06e74d3cc80..3264d6627ead5d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPlanUtil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVPlanUtil.java @@ -33,11 +33,8 @@ import org.apache.doris.nereids.parser.NereidsParser; import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.rules.RuleType; -import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.commands.ExplainCommand.ExplainLevel; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector.TableCollectorContext; import org.apache.doris.qe.ConnectContext; import com.google.common.collect.ImmutableSet; @@ -99,41 +96,27 @@ private static 
void setCatalogAndDb(ConnectContext ctx, MTMV mtmv) { ctx.setDatabase(databaseIf.get().getFullName()); } - public static MTMVRelation generateMTMVRelation(MTMV mtmv, ConnectContext ctx) { - // Should not make table without data to empty relation when analyze the related table, - // so add disable rules - Plan plan = getAnalyzePlanBySql(mtmv.getQuerySql(), ctx); - return generateMTMVRelation(plan, ctx); - } - - public static MTMVRelation generateMTMVRelation(Plan plan, ConnectContext connectContext) { - return new MTMVRelation(getBaseTables(plan, true, connectContext), - getBaseTables(plan, false, connectContext), getBaseViews(plan)); - } - - private static Set getBaseTables(Plan plan, boolean expand, ConnectContext connectContext) { - TableCollectorContext collectorContext = - new TableCollector.TableCollectorContext( - com.google.common.collect.Sets - .newHashSet(TableType.values()), expand, connectContext); - plan.accept(TableCollector.INSTANCE, collectorContext); - Set collectedTables = collectorContext.getCollectedTables(); - return transferTableIfToInfo(collectedTables); - } - - private static Set getBaseViews(Plan plan) { - return Sets.newHashSet(); - } - - private static Set transferTableIfToInfo(Set tables) { - Set result = com.google.common.collect.Sets.newHashSet(); - for (TableIf table : tables) { - result.add(new BaseTableInfo(table)); + public static MTMVRelation generateMTMVRelation(Set tablesInPlan, ConnectContext ctx) { + Set oneLevelTables = Sets.newHashSet(); + Set allLevelTables = Sets.newHashSet(); + Set oneLevelViews = Sets.newHashSet(); + for (TableIf table : tablesInPlan) { + BaseTableInfo baseTableInfo = new BaseTableInfo(table); + if (table.getType() == TableType.VIEW) { + // TODO reopen it after we support mv on view + // oneLevelViews.add(baseTableInfo); + } else { + oneLevelTables.add(baseTableInfo); + allLevelTables.add(baseTableInfo); + if (table instanceof MTMV) { + allLevelTables.addAll(((MTMV) table).getRelation().getBaseTables()); 
+ } + } } - return result; + return new MTMVRelation(allLevelTables, oneLevelTables, oneLevelViews); } - private static Plan getAnalyzePlanBySql(String querySql, ConnectContext ctx) { + public static Set getBaseTableFromQuery(String querySql, ConnectContext ctx) { List statements; try { statements = new NereidsParser().parseSQL(querySql); @@ -143,12 +126,15 @@ private static Plan getAnalyzePlanBySql(String querySql, ConnectContext ctx) { StatementBase parsedStmt = statements.get(0); LogicalPlan logicalPlan = ((LogicalPlanAdapter) parsedStmt).getLogicalPlan(); StatementContext original = ctx.getStatementContext(); - ctx.setStatementContext(new StatementContext()); - try { - NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); - return planner.planWithLock(logicalPlan, PhysicalProperties.ANY, ExplainLevel.ANALYZED_PLAN); - } finally { - ctx.setStatementContext(original); + try (StatementContext tempCtx = new StatementContext()) { + ctx.setStatementContext(tempCtx); + try { + NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); + planner.planWithLock(logicalPlan, PhysicalProperties.ANY, ExplainLevel.ANALYZED_PLAN); + return Sets.newHashSet(ctx.getStatementContext().getTables().values()); + } finally { + ctx.setStatementContext(original); + } } } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRelationManager.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRelationManager.java index 436427526ba08b..f8f92e25d38d65 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRelationManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRelationManager.java @@ -59,8 +59,8 @@ public class MTMVRelationManager implements MTMVHookService { // create mv2 as select * from mv1; // `tableMTMVs` will have 3 pair: table1 ==> mv1,mv1==>mv2, table1 ==> mv2 // `tableMTMVsOneLevel` will have 2 pair: table1 ==> mv1,mv1==>mv2 - private Map> tableMTMVs = Maps.newConcurrentMap(); - private Map> 
tableMTMVsOneLevel = Maps.newConcurrentMap(); + private final Map> tableMTMVs = Maps.newConcurrentMap(); + private final Map> tableMTMVsOneLevel = Maps.newConcurrentMap(); public Set getMtmvsByBaseTable(BaseTableInfo table) { return tableMTMVs.getOrDefault(table, ImmutableSet.of()); @@ -98,6 +98,23 @@ public Set getAvailableMTMVs(List tableInfos, ConnectContex return res; } + /** + * get all mtmv related to tableInfos. + */ + public Set getAllMTMVs(List tableInfos) { + Set mtmvs = Sets.newLinkedHashSet(); + Set mvInfos = getMTMVInfos(tableInfos); + for (BaseTableInfo tableInfo : mvInfos) { + try { + mtmvs.add((MTMV) MTMVUtil.getTable(tableInfo)); + } catch (AnalysisException e) { + // not throw exception to client, just ignore it + LOG.warn("getTable failed: {}", tableInfo.toString(), e); + } + } + return mtmvs; + } + @VisibleForTesting public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx, boolean forceConsistent) { long currentTimeMillis = System.currentTimeMillis(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRewriteUtil.java b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRewriteUtil.java index 7b7d743a36bc8c..ff1b3263d3409e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRewriteUtil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/mtmv/MTMVRewriteUtil.java @@ -51,8 +51,8 @@ public static Collection getMTMVCanRewritePartitions(MTMV mtmv, Conne return res; } // check mv is normal - if (mtmv.getStatus().getState() != MTMVState.NORMAL - || mtmv.getStatus().getRefreshState() == MTMVRefreshState.INIT) { + MTMVStatus mtmvStatus = mtmv.getStatus(); + if (mtmvStatus.getState() != MTMVState.NORMAL || mtmvStatus.getRefreshState() == MTMVRefreshState.INIT) { return res; } MTMVRefreshContext refreshContext = null; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java index bb10996a11bf6a..258704763909f1 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java @@ -17,20 +17,13 @@ package org.apache.doris.nereids; -import org.apache.doris.catalog.DatabaseIf; -import org.apache.doris.catalog.Env; -import org.apache.doris.catalog.TableIf; import org.apache.doris.common.Pair; -import org.apache.doris.datasource.CatalogIf; import org.apache.doris.nereids.analyzer.Scope; -import org.apache.doris.nereids.analyzer.UnboundOneRowRelation; -import org.apache.doris.nereids.analyzer.UnboundRelation; -import org.apache.doris.nereids.analyzer.UnboundTableSink; -import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.hint.Hint; import org.apache.doris.nereids.jobs.Job; import org.apache.doris.nereids.jobs.JobContext; import org.apache.doris.nereids.jobs.executor.Analyzer; +import org.apache.doris.nereids.jobs.executor.TableCollector; import org.apache.doris.nereids.jobs.rewrite.RewriteBottomUpJob; import org.apache.doris.nereids.jobs.rewrite.RewriteTopDownJob; import org.apache.doris.nereids.jobs.rewrite.RootPlanTreeRewriteJob.RootRewriteJobContext; @@ -46,7 +39,6 @@ import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.rules.RuleFactory; import org.apache.doris.nereids.rules.RuleSet; -import org.apache.doris.nereids.rules.analysis.BindRelation.CustomTableResolver; import org.apache.doris.nereids.rules.exploration.mv.MaterializationContext; import org.apache.doris.nereids.trees.expressions.CTEId; import org.apache.doris.nereids.trees.expressions.Expression; @@ -54,13 +46,7 @@ import org.apache.doris.nereids.trees.expressions.SubqueryExpr; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.RelationId; -import org.apache.doris.nereids.trees.plans.logical.LogicalCTE; import org.apache.doris.nereids.trees.plans.logical.LogicalCTEConsumer; -import 
org.apache.doris.nereids.trees.plans.logical.LogicalFilter; -import org.apache.doris.nereids.trees.plans.logical.LogicalHaving; -import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; -import org.apache.doris.nereids.trees.plans.logical.LogicalProject; -import org.apache.doris.nereids.trees.plans.logical.LogicalSubQueryAlias; import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.SessionVariable; import org.apache.doris.statistics.ColumnStatistic; @@ -70,7 +56,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; -import org.apache.commons.collections.MapUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -84,8 +69,6 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.Stack; -import java.util.concurrent.TimeUnit; import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nullable; @@ -103,7 +86,7 @@ public class CascadesContext implements ScheduleContext { private Memo memo; private final StatementContext statementContext; - private final CTEContext cteContext; + private CTEContext cteContext; private final RuleSet ruleSet; private final JobPool jobPool; private final JobScheduler jobScheduler; @@ -113,7 +96,6 @@ public class CascadesContext implements ScheduleContext { private final RuntimeFilterContext runtimeFilterContext; private final TopnFilterContext topnFilterContext = new TopnFilterContext(); private Optional outerScope = Optional.empty(); - private Map, TableIf> tables = null; private boolean isRewriteRoot; private volatile boolean isTimeout = false; @@ -169,9 +151,6 @@ private CascadesContext(Optional parent, Optional curren } else { this.isEnableExprTrace = false; } - if (parent.isPresent()) { - this.tables = parent.get().tables; - } this.isLeadingDisableJoinReorder = isLeadingDisableJoinReorder; } @@ -245,12 +224,12 @@ public 
void toMemo() { this.memo = new Memo(getConnectContext(), plan); } - public Analyzer newAnalyzer() { - return newAnalyzer(Optional.empty()); + public TableCollector newTableCollector() { + return new TableCollector(this); } - public Analyzer newAnalyzer(Optional customTableResolver) { - return new Analyzer(this, customTableResolver); + public Analyzer newAnalyzer() { + return new Analyzer(this); } @Override @@ -266,10 +245,6 @@ public void releaseMemo() { this.memo = null; } - public void setTables(Map, TableIf> tables) { - this.tables = tables; - } - public final ConnectContext getConnectContext() { return statementContext.getConnectContext(); } @@ -344,6 +319,10 @@ public CTEContext getCteContext() { return cteContext; } + public void setCteContext(CTEContext cteContext) { + this.cteContext = cteContext; + } + public void setIsRewriteRoot(boolean isRewriteRoot) { this.isRewriteRoot = isRewriteRoot; } @@ -408,204 +387,6 @@ private CascadesContext execute(Job job) { return this; } - /** - * Extract tables. 
- */ - public void extractTables(LogicalPlan logicalPlan) { - Set> tableNames = getTables(logicalPlan); - tables = Maps.newHashMap(); - for (List tableName : tableNames) { - try { - TableIf table = getTable(tableName); - tables.put(table.getFullQualifiers(), table); - } catch (Throwable e) { - // IGNORE - } - } - - } - - public Map, TableIf> getTables() { - if (tables == null) { - return null; - } else { - return tables; - } - } - - private Set> getTables(LogicalPlan logicalPlan) { - final Set> tableNames = new HashSet<>(); - logicalPlan.foreach(p -> { - if (p instanceof LogicalFilter) { - tableNames.addAll(extractTableNamesFromFilter((LogicalFilter) p)); - } else if (p instanceof LogicalCTE) { - tableNames.addAll(extractTableNamesFromCTE((LogicalCTE) p)); - } else if (p instanceof LogicalProject) { - tableNames.addAll(extractTableNamesFromProject((LogicalProject) p)); - } else if (p instanceof LogicalHaving) { - tableNames.addAll(extractTableNamesFromHaving((LogicalHaving) p)); - } else if (p instanceof UnboundOneRowRelation) { - tableNames.addAll(extractTableNamesFromOneRowRelation((UnboundOneRowRelation) p)); - } else { - Set logicalPlans = p.collect( - n -> (n instanceof UnboundRelation || n instanceof UnboundTableSink)); - for (LogicalPlan plan : logicalPlans) { - if (plan instanceof UnboundRelation) { - tableNames.add(((UnboundRelation) plan).getNameParts()); - } else if (plan instanceof UnboundTableSink) { - tableNames.add(((UnboundTableSink) plan).getNameParts()); - } else { - throw new AnalysisException("get tables from plan failed. 
meet unknown type node " + plan); - } - } - } - }); - return tableNames; - } - - public Map, TableIf> getOrExtractTables(LogicalPlan logicalPlan) { - if (MapUtils.isEmpty(tables)) { - extractTables(logicalPlan); - } - return tables; - } - - private Set> extractTableNamesFromHaving(LogicalHaving having) { - Set subqueryExprs = having.getPredicate() - .collect(SubqueryExpr.class::isInstance); - Set> tableNames = new HashSet<>(); - for (SubqueryExpr expr : subqueryExprs) { - LogicalPlan plan = expr.getQueryPlan(); - tableNames.addAll(getTables(plan)); - } - return tableNames; - } - - private Set> extractTableNamesFromOneRowRelation(UnboundOneRowRelation oneRowRelation) { - Set subqueryExprs = oneRowRelation.getProjects().stream() - .>map(p -> p.collect(SubqueryExpr.class::isInstance)) - .flatMap(Set::stream) - .collect(Collectors.toSet()); - Set> tableNames = new HashSet<>(); - for (SubqueryExpr expr : subqueryExprs) { - LogicalPlan plan = expr.getQueryPlan(); - tableNames.addAll(getTables(plan)); - } - return tableNames; - } - - private Set> extractTableNamesFromProject(LogicalProject project) { - Set subqueryExprs = project.getProjects().stream() - .>map(p -> p.collect(SubqueryExpr.class::isInstance)) - .flatMap(Set::stream) - .collect(Collectors.toSet()); - Set> tableNames = new HashSet<>(); - for (SubqueryExpr expr : subqueryExprs) { - LogicalPlan plan = expr.getQueryPlan(); - tableNames.addAll(getTables(plan)); - } - return tableNames; - } - - private Set> extractTableNamesFromFilter(LogicalFilter filter) { - Set subqueryExprs = filter.getPredicate() - .collect(SubqueryExpr.class::isInstance); - Set> tableNames = new HashSet<>(); - for (SubqueryExpr expr : subqueryExprs) { - LogicalPlan plan = expr.getQueryPlan(); - tableNames.addAll(getTables(plan)); - } - return tableNames; - } - - private Set> extractTableNamesFromCTE(LogicalCTE cte) { - List> subQueryAliases = cte.getAliasQueries(); - Set> tableNames = new HashSet<>(); - for (LogicalSubQueryAlias 
subQueryAlias : subQueryAliases) { - tableNames.addAll(getTables(subQueryAlias)); - } - return tableNames; - } - - private TableIf getTable(List nameParts) { - switch (nameParts.size()) { - case 1: { // table - String ctlName = getConnectContext().getEnv().getCurrentCatalog().getName(); - String dbName = getConnectContext().getDatabase(); - return getTable(ctlName, dbName, nameParts.get(0), getConnectContext().getEnv()); - } - case 2: { // db.table - String ctlName = getConnectContext().getEnv().getCurrentCatalog().getName(); - String dbName = nameParts.get(0); - return getTable(ctlName, dbName, nameParts.get(1), getConnectContext().getEnv()); - } - case 3: { // catalog.db.table - return getTable(nameParts.get(0), nameParts.get(1), nameParts.get(2), getConnectContext().getEnv()); - } - default: - throw new IllegalStateException("Table name [" + String.join(".", nameParts) + "] is invalid."); - } - } - - /** - * Find table from catalog. - */ - public TableIf getTable(String ctlName, String dbName, String tableName, Env env) { - CatalogIf catalog = env.getCatalogMgr().getCatalog(ctlName); - if (catalog == null) { - throw new RuntimeException("Catalog [" + ctlName + "] does not exist."); - } - DatabaseIf db = catalog.getDbNullable(dbName); - if (db == null) { - throw new RuntimeException("Database [" + dbName + "] does not exist in catalog [" + ctlName + "]."); - } - - TableIf table = db.getTableNullable(tableName); - if (table == null) { - throw new RuntimeException("Table [" + tableName + "] does not exist in database [" + dbName + "]."); - } - return table; - - } - - /** - * Used to lock table - */ - public static class Lock implements AutoCloseable { - - CascadesContext cascadesContext; - private final Stack locked = new Stack<>(); - - /** - * Try to acquire read locks on tables, throw runtime exception once the acquiring for read lock failed. 
- */ - public Lock(LogicalPlan plan, CascadesContext cascadesContext) { - this.cascadesContext = cascadesContext; - // tables can also be load from dump file - if (cascadesContext.getTables() == null || cascadesContext.getTables().isEmpty()) { - cascadesContext.extractTables(plan); - cascadesContext.getStatementContext().setTables(cascadesContext.getTables()); - } - for (TableIf table : cascadesContext.tables.values()) { - if (!table.needReadLockWhenPlan()) { - continue; - } - if (!table.tryReadLock(1, TimeUnit.MINUTES)) { - close(); - throw new RuntimeException(String.format("Failed to get read lock on table: %s", table.getName())); - } - locked.push(table); - } - } - - @Override - public void close() { - while (!locked.empty()) { - locked.pop().readUnlock(); - } - } - } - public void putCTEIdToConsumer(LogicalCTEConsumer cteConsumer) { Set consumers = this.statementContext.getCteIdToConsumers() .computeIfAbsent(cteConsumer.getCteId(), k -> new HashSet<>()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java index 16fe1353facfb6..4eafa0e2172f96 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java @@ -30,7 +30,6 @@ import org.apache.doris.common.profile.SummaryProfile; import org.apache.doris.common.util.DebugUtil; import org.apache.doris.mysql.FieldInfo; -import org.apache.doris.nereids.CascadesContext.Lock; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; import org.apache.doris.nereids.glue.translator.PhysicalPlanTranslator; @@ -156,7 +155,7 @@ public void plan(StatementBase queryStmt, org.apache.doris.thrift.TQueryOptions } @VisibleForTesting - public void planWithLock(StatementBase queryStmt) { + public void plan(StatementBase queryStmt) { try { plan(queryStmt, 
statementContext.getConnectContext().getSessionVariable().toThrift()); } catch (Exception e) { @@ -164,14 +163,17 @@ public void planWithLock(StatementBase queryStmt) { } } + @VisibleForTesting public PhysicalPlan planWithLock(LogicalPlan plan, PhysicalProperties outputProperties) { return (PhysicalPlan) planWithLock(plan, outputProperties, ExplainLevel.NONE, false); } + // TODO check all caller public Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, ExplainLevel explainLevel) { return planWithLock(plan, requireProperties, explainLevel, false); } + @VisibleForTesting public Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, ExplainLevel explainLevel, boolean showPlanProcess) { Consumer noCallback = p -> {}; @@ -188,9 +190,8 @@ public Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, * @return plan generated by this planner * @throws AnalysisException throw exception if failed in ant stage */ - public Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, - ExplainLevel explainLevel, boolean showPlanProcess, - Consumer lockCallback) { + private Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, + ExplainLevel explainLevel, boolean showPlanProcess, Consumer lockCallback) { try { long beforePlanGcTime = getGarbageCollectionTime(); if (plan instanceof LogicalSqlCache) { @@ -216,39 +217,37 @@ public Plan planWithLock(LogicalPlan plan, PhysicalProperties requireProperties, plan = preprocess(plan); initCascadesContext(plan, requireProperties); - statementContext.loadSnapshots(cascadesContext.getOrExtractTables(plan)); - try (Lock lock = new Lock(plan, cascadesContext)) { - Plan resultPlan = planWithoutLock(plan, explainLevel, showPlanProcess, requireProperties); - lockCallback.accept(resultPlan); - if (statementContext.getConnectContext().getExecutor() != null) { - statementContext.getConnectContext().getExecutor().getSummaryProfile() - 
.setNereidsGarbageCollectionTime(getGarbageCollectionTime() - beforePlanGcTime); - } - return resultPlan; + // collect table and lock them in the order of table id + collectAndLockTable(showAnalyzeProcess(explainLevel, showPlanProcess)); + // after table collector, we should use a new context. + statementContext.loadSnapshots(); + Plan resultPlan = planWithoutLock(plan, requireProperties, explainLevel, showPlanProcess); + lockCallback.accept(resultPlan); + if (statementContext.getConnectContext().getExecutor() != null) { + statementContext.getConnectContext().getExecutor().getSummaryProfile() + .setNereidsGarbageCollectionTime(getGarbageCollectionTime() - beforePlanGcTime); } + return resultPlan; } finally { statementContext.releasePlannerResources(); } } - protected Plan planWithoutLock( - LogicalPlan plan, ExplainLevel explainLevel, - boolean showPlanProcess, PhysicalProperties requireProperties) { - // resolve column, table and function - // analyze this query - analyze(showAnalyzeProcess(explainLevel, showPlanProcess)); + /** + * do plan but not lock any table + */ + private Plan planWithoutLock( + LogicalPlan plan, PhysicalProperties requireProperties, ExplainLevel explainLevel, + boolean showPlanProcess) { // minidump of input must be serialized first, this process ensure minidump string not null try { - MinidumpUtils.serializeInputsToDumpFile(plan, cascadesContext.getTables()); + + MinidumpUtils.serializeInputsToDumpFile(plan, statementContext); } catch (IOException e) { throw new RuntimeException(e); } - - if (statementContext.getConnectContext().getExecutor() != null) { - statementContext.getConnectContext().getExecutor().getSummaryProfile().setQueryAnalysisFinishTime(); - statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsAnalysisTime(); - } - + // analyze this query, resolve column, table and function + analyze(showAnalyzeProcess(explainLevel, showPlanProcess)); if (explainLevel == ExplainLevel.ANALYZED_PLAN || explainLevel 
== ExplainLevel.ALL_PLAN) { analyzedPlan = cascadesContext.getRewritePlan(); if (explainLevel == ExplainLevel.ANALYZED_PLAN) { @@ -258,10 +257,6 @@ protected Plan planWithoutLock( // rule-based optimize rewrite(showRewriteProcess(explainLevel, showPlanProcess)); - if (statementContext.getConnectContext().getExecutor() != null) { - statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsRewriteTime(); - } - if (explainLevel == ExplainLevel.REWRITTEN_PLAN || explainLevel == ExplainLevel.ALL_PLAN) { rewrittenPlan = cascadesContext.getRewritePlan(); if (explainLevel == ExplainLevel.REWRITTEN_PLAN) { @@ -269,40 +264,20 @@ protected Plan planWithoutLock( } } - // if we cannot get table row count, skip join reorder - // except: - // 1. user set leading hint - // 2. ut test. In ut test, FeConstants.enableInternalSchemaDb is false or FeConstants.runningUnitTest is true - if (FeConstants.enableInternalSchemaDb && !FeConstants.runningUnitTest - && !cascadesContext.isLeadingDisableJoinReorder()) { - List scans = cascadesContext.getRewritePlan() - .collectToList(CatalogRelation.class::isInstance); - Optional disableJoinReorderReason = StatsCalculator - .disableJoinReorderIfStatsInvalid(scans, cascadesContext); - disableJoinReorderReason.ifPresent(statementContext::setDisableJoinReorderReason); - } - - setRuntimeFilterWaitTimeByTableRowCountAndType(); - optimize(); - if (statementContext.getConnectContext().getExecutor() != null) { - statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsOptimizeTime(); - } - // print memo before choose plan. 
// if chooseNthPlan failed, we could get memo to debug if (cascadesContext.getConnectContext().getSessionVariable().dumpNereidsMemo) { String memo = cascadesContext.getMemo().toString(); - LOG.info(ConnectContext.get().getQueryIdentifier() + "\n" + memo); + LOG.info("{}\n{}", ConnectContext.get().getQueryIdentifier(), memo); } - int nth = cascadesContext.getConnectContext().getSessionVariable().getNthOptimizedPlan(); PhysicalPlan physicalPlan = chooseNthPlan(getRoot(), requireProperties, nth); physicalPlan = postProcess(physicalPlan); if (cascadesContext.getConnectContext().getSessionVariable().dumpNereidsMemo) { String tree = physicalPlan.treeString(); - LOG.info(ConnectContext.get().getQueryIdentifier() + "\n" + tree); + LOG.info("{}\n{}", ConnectContext.get().getQueryIdentifier(), tree); } if (explainLevel == ExplainLevel.OPTIMIZED_PLAN || explainLevel == ExplainLevel.ALL_PLAN @@ -361,8 +336,21 @@ private void setRuntimeFilterWaitTimeByTableRowCountAndType() { private void initCascadesContext(LogicalPlan plan, PhysicalProperties requireProperties) { cascadesContext = CascadesContext.initContext(statementContext, plan, requireProperties); - if (statementContext.getTables() != null) { - cascadesContext.setTables(statementContext.getTables()); + } + + protected void collectAndLockTable(boolean showPlanProcess) { + if (LOG.isDebugEnabled()) { + LOG.debug("Start collect and lock table"); + } + keepOrShowPlanProcess(showPlanProcess, () -> cascadesContext.newTableCollector().collect()); + statementContext.lock(); + cascadesContext.setCteContext(new CTEContext()); + NereidsTracer.logImportantTime("EndCollectAndLockTables"); + if (LOG.isDebugEnabled()) { + LOG.debug("End collect and lock table"); + } + if (statementContext.getConnectContext().getExecutor() != null) { + statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsLockTableFinishTime(); } } @@ -376,6 +364,11 @@ protected void analyze(boolean showPlanProcess) { if (LOG.isDebugEnabled()) 
{ LOG.debug("End analyze plan"); } + + if (statementContext.getConnectContext().getExecutor() != null) { + statementContext.getConnectContext().getExecutor().getSummaryProfile().setQueryAnalysisFinishTime(); + statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsAnalysisTime(); + } } /** @@ -390,10 +383,26 @@ protected void rewrite(boolean showPlanProcess) { if (LOG.isDebugEnabled()) { LOG.debug("End rewrite plan"); } + if (statementContext.getConnectContext().getExecutor() != null) { + statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsRewriteTime(); + } } // DependsRules: EnsureProjectOnTopJoin.class protected void optimize() { + // if we cannot get table row count, skip join reorder + // except: + // 1. user set leading hint + // 2. ut test. In ut test, FeConstants.enableInternalSchemaDb is false or FeConstants.runningUnitTest is true + if (FeConstants.enableInternalSchemaDb && !FeConstants.runningUnitTest + && !cascadesContext.isLeadingDisableJoinReorder()) { + List scans = cascadesContext.getRewritePlan() + .collectToList(CatalogRelation.class::isInstance); + Optional disableJoinReorderReason = StatsCalculator + .disableJoinReorderIfStatsInvalid(scans, cascadesContext); + disableJoinReorderReason.ifPresent(statementContext::setDisableJoinReorderReason); + } + setRuntimeFilterWaitTimeByTableRowCountAndType(); if (LOG.isDebugEnabled()) { LOG.debug("Start optimize plan"); } @@ -402,6 +411,9 @@ protected void optimize() { if (LOG.isDebugEnabled()) { LOG.debug("End optimize plan"); } + if (statementContext.getConnectContext().getExecutor() != null) { + statementContext.getConnectContext().getExecutor().getSummaryProfile().setNereidsOptimizeTime(); + } } protected void splitFragments(PhysicalPlan resultPlan) { @@ -685,6 +697,8 @@ public String getExplainString(ExplainOptions explainOptions) { plan = "========== PARSED PLAN " + getTimeMetricString(SummaryProfile::getPrettyParseSqlTime) + " ==========\n" + 
parsedPlan.treeString() + "\n\n" + + "========== LOCK TABLE " + + getTimeMetricString(SummaryProfile::getPrettyNereidsLockTableTime) + " ==========\n" + "========== ANALYZED PLAN " + getTimeMetricString(SummaryProfile::getPrettyNereidsAnalysisTime) + " ==========\n" + analyzedPlan.treeString() + "\n\n" @@ -864,7 +878,7 @@ private boolean showRewriteProcess(ExplainLevel explainLevel, boolean showPlanPr } private boolean showPlanProcess(ExplainOptions explainOptions) { - return explainOptions == null ? false : explainOptions.showPlanProcess(); + return explainOptions != null && explainOptions.showPlanProcess(); } private void keepOrShowPlanProcess(boolean showPlanProcess, Runnable task) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/SqlCacheContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/SqlCacheContext.java index ea3e80877db329..29be4af41a7675 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/SqlCacheContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/SqlCacheContext.java @@ -424,6 +424,10 @@ public static class FullTableName { public String toString() { return catalog + "." + db + "." 
+ table; } + + public List toList() { + return Lists.newArrayList(catalog, db, table); + } } /** FullColumnName */ diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/StatementContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/StatementContext.java index 4d60b06e7c5cc1..7717c1034bb6b4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/StatementContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/StatementContext.java @@ -19,6 +19,7 @@ import org.apache.doris.analysis.StatementBase; import org.apache.doris.catalog.TableIf; +import org.apache.doris.catalog.View; import org.apache.doris.catalog.constraint.TableIdentifier; import org.apache.doris.common.FormatOptions; import org.apache.doris.common.Id; @@ -42,9 +43,9 @@ import org.apache.doris.nereids.trees.plans.PlaceholderId; import org.apache.doris.nereids.trees.plans.RelationId; import org.apache.doris.nereids.trees.plans.TableId; -import org.apache.doris.nereids.trees.plans.algebra.Relation; import org.apache.doris.nereids.trees.plans.logical.LogicalCTEConsumer; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; +import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.OriginStatement; import org.apache.doris.qe.SessionVariable; @@ -54,7 +55,6 @@ import org.apache.doris.system.Backend; import com.google.common.annotations.VisibleForTesting; -import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; @@ -70,11 +70,13 @@ import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; +import java.util.Comparator; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.PriorityQueue; import java.util.Set; import java.util.Stack; import java.util.TreeMap; @@ -87,6 +89,18 @@ 
public class StatementContext implements Closeable { private static final Logger LOG = LogManager.getLogger(StatementContext.class); + /** + * indicate where the table come from. + * QUERY: in query sql directly + * INSERT_TARGET: the insert target table + * MTMV: mtmv itself and its related tables witch do not belong to this sql, but maybe used in rewrite by mtmv. + */ + public enum TableFrom { + QUERY, + INSERT_TARGET, + MTMV + } + private ConnectContext connectContext; private final Stopwatch stopwatch = Stopwatch.createUnstarted(); @@ -140,10 +154,6 @@ public class StatementContext implements Closeable { private final List hints = new ArrayList<>(); - // Map slot to its relation, currently used in SlotReference to find its original - // Relation for example LogicalOlapScan - private final Map slotToRelation = Maps.newHashMap(); - // the columns in Plan.getExpressions(), such as columns in join condition or filter condition, group by expression private final Set keySlots = Sets.newHashSet(); private BitSet disableRules; @@ -154,8 +164,17 @@ public class StatementContext implements Closeable { // placeholder params for prepared statement private List placeholders; - // tables used for plan replayer - private Map, TableIf> tables = null; + // all tables in query + private boolean needLockTables = true; + + // tables in this query directly + private final Map, TableIf> tables = Maps.newHashMap(); + // tables maybe used by mtmv rewritten in this query + private final Map, TableIf> mtmvRelatedTables = Maps.newHashMap(); + // insert into target tables + private final Map, TableIf> insertTargetTables = Maps.newHashMap(); + // save view's def and sql mode to avoid them change before lock + private final Map, Pair> viewInfos = Maps.newHashMap(); // for create view support in nereids // key is the start and end position of the sql substring that needs to be replaced, @@ -178,7 +197,7 @@ public class StatementContext implements Closeable { private FormatOptions 
formatOptions = FormatOptions.getDefault(); - private List plannerHooks = new ArrayList<>(); + private final List plannerHooks = new ArrayList<>(); private String disableJoinReorderReason; @@ -220,28 +239,67 @@ public StatementContext(ConnectContext connectContext, OriginStatement originSta } } + public void setNeedLockTables(boolean needLockTables) { + this.needLockTables = needLockTables; + } + + /** + * cache view info to avoid view's def and sql mode changed before lock it. + * + * @param qualifiedViewName full qualified name of the view + * @param view view need to cache info + * + * @return view info, first is view's def sql, second is view's sql mode + */ + public Pair getAndCacheViewInfo(List qualifiedViewName, View view) { + return viewInfos.computeIfAbsent(qualifiedViewName, k -> { + String viewDef; + long sqlMode; + view.readLock(); + try { + viewDef = view.getInlineViewDef(); + sqlMode = view.getSqlMode(); + } finally { + view.readUnlock(); + } + return Pair.of(viewDef, sqlMode); + }); + } + + public Map, TableIf> getInsertTargetTables() { + return insertTargetTables; + } + + public Map, TableIf> getMtmvRelatedTables() { + return mtmvRelatedTables; + } + public Map, TableIf> getTables() { - if (tables == null) { - tables = Maps.newHashMap(); - } return tables; } public void setTables(Map, TableIf> tables) { - this.tables = tables; + this.tables.clear(); + this.tables.putAll(tables); } /** get table by table name, try to get from information from dumpfile first */ - public TableIf getTableInMinidumpCache(List tableQualifier) { - if (!getConnectContext().getSessionVariable().isPlayNereidsDump()) { - return null; - } - Preconditions.checkState(tables != null, "tables should not be null"); - TableIf table = tables.getOrDefault(tableQualifier, null); - if (getConnectContext().getSessionVariable().isPlayNereidsDump() && table == null) { - throw new AnalysisException("Minidump cache can not find table:" + tableQualifier); + public TableIf getAndCacheTable(List 
tableQualifier, TableFrom tableFrom) { + Map, TableIf> tables; + switch (tableFrom) { + case QUERY: + tables = this.tables; + break; + case INSERT_TARGET: + tables = this.insertTargetTables; + break; + case MTMV: + tables = this.mtmvRelatedTables; + break; + default: + throw new AnalysisException("Unknown table from " + tableFrom); } - return table; + return tables.computeIfAbsent(tableQualifier, k -> RelationUtil.getTable(k, connectContext.getEnv())); } public void setConnectContext(ConnectContext connectContext) { @@ -303,10 +361,6 @@ public Optional getSqlCacheContext() { return Optional.ofNullable(sqlCacheContext); } - public void addSlotToRelation(Slot slot, Relation relation) { - slotToRelation.put(slot, relation); - } - public boolean isDpHyp() { return isDpHyp; } @@ -475,21 +529,36 @@ public Map getRelationIdToStatisticsMap() { return relationIdToStatisticsMap; } - /** addTableReadLock */ - public synchronized void addTableReadLock(TableIf tableIf) { - if (!tableIf.needReadLockWhenPlan()) { + /** + * lock all table collect by TableCollector + */ + public synchronized void lock() { + if (!needLockTables + || (tables.isEmpty() && mtmvRelatedTables.isEmpty() && insertTargetTables.isEmpty()) + || !plannerResources.isEmpty()) { return; } - if (!tableIf.tryReadLock(1, TimeUnit.MINUTES)) { - close(); - throw new RuntimeException(String.format("Failed to get read lock on table: %s", tableIf.getName())); + PriorityQueue tableIfs = new PriorityQueue<>( + tables.size() + mtmvRelatedTables.size() + insertTargetTables.size(), + Comparator.comparing(TableIf::getId)); + tableIfs.addAll(tables.values()); + tableIfs.addAll(mtmvRelatedTables.values()); + tableIfs.addAll(insertTargetTables.values()); + while (!tableIfs.isEmpty()) { + TableIf tableIf = tableIfs.poll(); + if (!tableIf.needReadLockWhenPlan()) { + continue; + } + if (!tableIf.tryReadLock(1, TimeUnit.MINUTES)) { + close(); + throw new RuntimeException("Failed to get read lock on table:" + tableIf.getName()); + } + 
String fullTableName = tableIf.getNameWithFullQualifiers(); + String resourceName = "tableReadLock(" + fullTableName + ")"; + plannerResources.push(new CloseableResource( + resourceName, Thread.currentThread().getName(), + originStatement == null ? null : originStatement.originStmt, tableIf::readUnlock)); } - - String fullTableName = tableIf.getNameWithFullQualifiers(); - String resourceName = "tableReadLock(" + fullTableName + ")"; - plannerResources.push(new CloseableResource( - resourceName, Thread.currentThread().getName(), - originStatement == null ? null : originStatement.originStmt, tableIf::readUnlock)); } /** releasePlannerResources */ @@ -505,7 +574,7 @@ public synchronized void releasePlannerResources() { } } if (throwable != null) { - Throwables.propagateIfInstanceOf(throwable, RuntimeException.class); + Throwables.throwIfInstanceOf(throwable, RuntimeException.class); throw new IllegalStateException("Release resource failed", throwable); } } @@ -552,13 +621,8 @@ public void addPlannerHook(PlannerHook plannerHook) { /** * Load snapshot information of mvcc - * - * @param tables Tables used in queries */ - public void loadSnapshots(Map, TableIf> tables) { - if (tables == null) { - return; - } + public void loadSnapshots() { for (TableIf tableIf : tables.values()) { if (tableIf instanceof MvccTable) { MvccTableInfo mvccTableInfo = new MvccTableInfo(tableIf); @@ -616,7 +680,7 @@ public void close() { try { resource.close(); } catch (Throwable t) { - Throwables.propagateIfInstanceOf(t, RuntimeException.class); + Throwables.throwIfInstanceOf(t, RuntimeException.class); throw new IllegalStateException("Close resource failed: " + t.getMessage(), t); } closed = true; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundBaseExternalTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundBaseExternalTableSink.java index cfdefc59872d4e..2c88e2f4a46ff7 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundBaseExternalTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundBaseExternalTableSink.java @@ -21,7 +21,6 @@ import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; import org.apache.doris.nereids.properties.UnboundLogicalProperties; -import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; @@ -80,11 +79,6 @@ public UnboundBaseExternalTableSink withOutputExprs(List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOneRowRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOneRowRelation.java index 9bc368c8ad6b18..bb61bc93574208 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOneRowRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundOneRowRelation.java @@ -71,7 +71,7 @@ public List getProjects() { @Override public List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); + return projects; } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundRelation.java index b8d821e1548be7..12d4a7c74be58f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundRelation.java @@ -25,7 +25,6 @@ import 
org.apache.doris.nereids.properties.LogicalProperties; import org.apache.doris.nereids.properties.UnboundLogicalProperties; import org.apache.doris.nereids.trees.TableSample; -import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; import org.apache.doris.nereids.trees.plans.Plan; @@ -186,11 +185,6 @@ public R accept(PlanVisitor visitor, C context) { return visitor.visitUnboundRelation(this, context); } - @Override - public List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); - } - public List getPartNames() { return partNames; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundResultSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundResultSink.java index 5fd5c18a365d04..d57e518824d3aa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundResultSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundResultSink.java @@ -21,7 +21,6 @@ import org.apache.doris.nereids.exceptions.UnboundException; import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; -import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; @@ -64,11 +63,6 @@ public R accept(PlanVisitor visitor, C context) { return visitor.visitUnboundResultSink(this, context); } - @Override - public List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); - } - @Override public Plan withGroupExpression(Optional groupExpression) { return new 
UnboundResultSink<>(groupExpression, Optional.of(getLogicalProperties()), child()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTVFRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTVFRelation.java index e876825af6569a..3024058edc7a5d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTVFRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTVFRelation.java @@ -21,7 +21,6 @@ import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; import org.apache.doris.nereids.properties.UnboundLogicalProperties; -import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.Properties; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.functions.table.TableValuedFunction; @@ -79,11 +78,6 @@ public R accept(PlanVisitor visitor, C context) { return visitor.visitUnboundTVFRelation(this, context); } - @Override - public List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); - } - @Override public List computeOutput() { throw new UnboundException("output"); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java index 23c58ba42fb17e..0e528227dc9742 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java @@ -21,7 +21,6 @@ import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; import org.apache.doris.nereids.properties.UnboundLogicalProperties; -import org.apache.doris.nereids.trees.expressions.Expression; import 
org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; @@ -135,11 +134,6 @@ public R accept(PlanVisitor visitor, C context) { return visitor.visitUnboundTableSink(this, context); } - @Override - public List getExpressions() { - throw new UnsupportedOperationException(this.getClass().getSimpleName() + " don't support getExpression()"); - } - @Override public boolean equals(Object o) { if (this == o) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/Analyzer.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/Analyzer.java index 03dbb6c7110a7c..e05fead5901509 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/Analyzer.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/Analyzer.java @@ -24,7 +24,6 @@ import org.apache.doris.nereids.rules.analysis.AnalyzeCTE; import org.apache.doris.nereids.rules.analysis.BindExpression; import org.apache.doris.nereids.rules.analysis.BindRelation; -import org.apache.doris.nereids.rules.analysis.BindRelation.CustomTableResolver; import org.apache.doris.nereids.rules.analysis.BindSink; import org.apache.doris.nereids.rules.analysis.CheckAfterBind; import org.apache.doris.nereids.rules.analysis.CheckAnalysis; @@ -58,8 +57,6 @@ import com.google.common.collect.ImmutableSet; import java.util.List; -import java.util.Objects; -import java.util.Optional; /** * Bind symbols according to metadata in the catalog, perform semantic analysis, etc. @@ -67,38 +64,20 @@ */ public class Analyzer extends AbstractBatchJobExecutor { - public static final List ANALYZE_JOBS = buildAnalyzeJobs(Optional.empty()); - - private final List jobs; - - /** - * Execute the analysis job with scope. 
- * @param cascadesContext planner context for execute job - */ - public Analyzer(CascadesContext cascadesContext) { - this(cascadesContext, Optional.empty()); - } + public static final List ANALYZE_JOBS = buildAnalyzeJobs(); /** * constructor of Analyzer. For view, we only do bind relation since other analyze step will do by outer Analyzer. * * @param cascadesContext current context for analyzer - * @param customTableResolver custom resolver for outer catalog. */ - public Analyzer(CascadesContext cascadesContext, Optional customTableResolver) { + public Analyzer(CascadesContext cascadesContext) { super(cascadesContext); - Objects.requireNonNull(customTableResolver, "customTableResolver cannot be null"); - - if (customTableResolver.isPresent()) { - this.jobs = buildAnalyzeJobs(customTableResolver); - } else { - this.jobs = ANALYZE_JOBS; - } } @Override public List getJobs() { - return jobs; + return ANALYZE_JOBS; } /** @@ -108,20 +87,20 @@ public void analyze() { execute(); } - private static List buildAnalyzeJobs(Optional customTableResolver) { + private static List buildAnalyzeJobs() { return notTraverseChildrenOf( ImmutableSet.of(LogicalView.class, LogicalCTEAnchor.class), - () -> buildAnalyzerJobs(customTableResolver) + Analyzer::buildAnalyzerJobs ); } - private static List buildAnalyzerJobs(Optional customTableResolver) { + private static List buildAnalyzerJobs() { return jobs( // we should eliminate hint before "Subquery unnesting". 
topDown(new AnalyzeCTE()), topDown(new EliminateLogicalSelectHint()), bottomUp( - new BindRelation(customTableResolver), + new BindRelation(), new CheckPolicy() ), bottomUp(new BindExpression()), diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/TableCollector.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/TableCollector.java new file mode 100644 index 00000000000000..0ae433262efeb9 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/executor/TableCollector.java @@ -0,0 +1,71 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.jobs.executor; + +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.jobs.rewrite.RewriteJob; +import org.apache.doris.nereids.rules.analysis.CollectRelation; +import org.apache.doris.nereids.trees.plans.logical.LogicalView; + +import com.google.common.collect.ImmutableSet; + +import java.util.List; + +/** + * Bind symbols according to metadata in the catalog, perform semantic analysis, etc. + * TODO: revisit the interface after subquery analysis is supported. 
+ */ +public class TableCollector extends AbstractBatchJobExecutor { + + public static final List COLLECT_JOBS = buildCollectTableJobs(); + + /** + * constructor of Analyzer. For view, we only do bind relation since other analyze step will do by outer Analyzer. + * + * @param cascadesContext current context for analyzer + */ + public TableCollector(CascadesContext cascadesContext) { + super(cascadesContext); + + } + + @Override + public List getJobs() { + return COLLECT_JOBS; + } + + /** + * nereids analyze sql. + */ + public void collect() { + execute(); + } + + private static List buildCollectTableJobs() { + return notTraverseChildrenOf( + ImmutableSet.of(LogicalView.class), + TableCollector::buildCollectorJobs + ); + } + + private static List buildCollectorJobs() { + return jobs( + topDown(new CollectRelation()) + ); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/minidump/MinidumpUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/minidump/MinidumpUtils.java index c0f88b25341cde..a369772f404b2c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/minidump/MinidumpUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/minidump/MinidumpUtils.java @@ -268,7 +268,7 @@ public static JSONObject executeSql(String sql) { } NereidsPlanner nereidsPlanner = new NereidsPlanner( new StatementContext(ConnectContext.get(), new OriginStatement(sql, 0))); - nereidsPlanner.planWithLock(LogicalPlanAdapter.of(parsed)); + nereidsPlanner.plan(LogicalPlanAdapter.of(parsed)); return ((AbstractPlan) nereidsPlanner.getOptimizedPlan()).toJson(); } @@ -554,10 +554,10 @@ private static JSONObject serializeInputs(Plan parsedPlan, Map, Tab /** * This function is used to serialize inputs of one query * @param parsedPlan input plan - * @param tables all tables relative to this query + * @param statementContext context for this query * @throws IOException this will write to disk, so io exception should be dealed with */ - public static void 
serializeInputsToDumpFile(Plan parsedPlan, Map, TableIf> tables) + public static void serializeInputsToDumpFile(Plan parsedPlan, StatementContext statementContext) throws IOException { ConnectContext connectContext = ConnectContext.get(); // when playing minidump file, we do not save input again. @@ -566,7 +566,10 @@ public static void serializeInputsToDumpFile(Plan parsedPlan, Map, } MinidumpUtils.init(); - connectContext.setMinidump(serializeInputs(parsedPlan, tables)); + Map, TableIf> allTablesUsedInQuery = Maps.newHashMap(); + allTablesUsedInQuery.putAll(statementContext.getTables()); + allTablesUsedInQuery.putAll(statementContext.getMtmvRelatedTables()); + connectContext.setMinidump(serializeInputs(parsedPlan, allTablesUsedInQuery)); } /** diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 0332123f9ff584..bb344e1b376deb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -916,7 +916,8 @@ public LogicalPlan visitInsertTable(InsertTableContext ctx) { command = new InsertOverwriteTableCommand(sink, labelName, cte); } else { if (ConnectContext.get() != null && ConnectContext.get().isTxnModel() - && sink.child() instanceof LogicalInlineTable) { + && sink.child() instanceof LogicalInlineTable + && sink.child().getExpressions().stream().allMatch(Expression::isConstant)) { // FIXME: In legacy, the `insert into select 1` is handled as `insert into values`. // In nereids, the original way is throw an AnalysisException and fallback to legacy. // Now handle it as `insert into select`(a separate load job), should fix it as the legacy. 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java index d348889818a5dd..4cf3c75b68dc43 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java @@ -26,9 +26,14 @@ public enum RuleType { // just for UT TEST_REWRITE(RuleTypeClass.REWRITE), - // binding rules - // **** make sure BINDING_UNBOUND_LOGICAL_PLAN is the lowest priority in the rewrite rules. **** + // collect relation rules + COLLECT_TABLE_FROM_CTE(RuleTypeClass.REWRITE), + COLLECT_TABLE_FROM_RELATION(RuleTypeClass.REWRITE), + COLLECT_TABLE_FROM_SINK(RuleTypeClass.REWRITE), + COLLECT_TABLE_FROM_OTHER(RuleTypeClass.REWRITE), + + // binding rules BINDING_RESULT_SINK(RuleTypeClass.REWRITE), BINDING_INSERT_HIVE_TABLE(RuleTypeClass.REWRITE), BINDING_INSERT_ICEBERG_TABLE(RuleTypeClass.REWRITE), diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java index c7d4e9f975e50a..583244f0902896 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java @@ -39,6 +39,7 @@ import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.SqlCacheContext; import org.apache.doris.nereids.StatementContext; +import org.apache.doris.nereids.StatementContext.TableFrom; import org.apache.doris.nereids.analyzer.Unbound; import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.analyzer.UnboundResultSink; @@ -98,25 +99,14 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; -import java.util.function.Function; /** * Rule to bind relations in query plan. 
*/ public class BindRelation extends OneAnalysisRuleFactory { - private final Optional customTableResolver; + public BindRelation() {} - public BindRelation() { - this(Optional.empty()); - } - - public BindRelation(Optional customTableResolver) { - this.customTableResolver = customTableResolver; - } - - // TODO: cte will be copied to a sub-query with different names but the id of the unbound relation in them - // are the same, so we use new relation id when binding relation, and will fix this bug later. @Override public Rule build() { return unboundRelation().thenApply(ctx -> { @@ -168,23 +158,10 @@ private LogicalPlan bindWithCurrentDb(CascadesContext cascadesContext, UnboundRe return consumer; } } - List tableQualifier = RelationUtil.getQualifierName(cascadesContext.getConnectContext(), - unboundRelation.getNameParts()); - TableIf table = null; - table = ConnectContext.get().getStatementContext().getTableInMinidumpCache(tableQualifier); - if (table == null) { - if (customTableResolver.isPresent()) { - table = customTableResolver.get().apply(tableQualifier); - } - } - // In some cases even if we have already called the "cascadesContext.getTableByName", - // it also gets the null. So, we just check it in the catalog again for safety. 
- if (table == null) { - table = RelationUtil.getTable(tableQualifier, cascadesContext.getConnectContext().getEnv()); - } - ConnectContext.get().getStatementContext().getTables().put(tableQualifier, table); + List tableQualifier = RelationUtil.getQualifierName( + cascadesContext.getConnectContext(), unboundRelation.getNameParts()); + TableIf table = cascadesContext.getStatementContext().getAndCacheTable(tableQualifier, TableFrom.QUERY); - // TODO: should generate different Scan sub class according to table's type LogicalPlan scan = getLogicalPlan(table, unboundRelation, tableQualifier, cascadesContext); if (cascadesContext.isLeadingJoin()) { LeadingHint leading = (LeadingHint) cascadesContext.getHintMap().get("Leading"); @@ -197,17 +174,7 @@ private LogicalPlan bindWithCurrentDb(CascadesContext cascadesContext, UnboundRe private LogicalPlan bind(CascadesContext cascadesContext, UnboundRelation unboundRelation) { List tableQualifier = RelationUtil.getQualifierName(cascadesContext.getConnectContext(), unboundRelation.getNameParts()); - TableIf table = null; - if (customTableResolver.isPresent()) { - table = customTableResolver.get().apply(tableQualifier); - } - table = ConnectContext.get().getStatementContext().getTableInMinidumpCache(tableQualifier); - // In some cases even if we have already called the "cascadesContext.getTableByName", - // it also gets the null. So, we just check it in the catalog again for safety. 
- if (table == null) { - table = RelationUtil.getTable(tableQualifier, cascadesContext.getConnectContext().getEnv()); - } - ConnectContext.get().getStatementContext().getTables().put(tableQualifier, table); + TableIf table = cascadesContext.getStatementContext().getAndCacheTable(tableQualifier, TableFrom.QUERY); return getLogicalPlan(table, unboundRelation, tableQualifier, cascadesContext); } @@ -415,8 +382,7 @@ private LogicalPlan getLogicalPlan(TableIf table, UnboundRelation unboundRelatio case VIEW: View view = (View) table; isView = true; - String inlineViewDef = view.getInlineViewDef(); - Plan viewBody = parseAndAnalyzeView(view, inlineViewDef, cascadesContext); + Plan viewBody = parseAndAnalyzeDorisView(view, qualifiedTableName, cascadesContext); LogicalView logicalView = new LogicalView<>(view, viewBody); return new LogicalSubQueryAlias<>(qualifiedTableName, logicalView); case HMS_EXTERNAL_TABLE: @@ -496,6 +462,17 @@ private Plan parseAndAnalyzeHiveView( } } + private Plan parseAndAnalyzeDorisView(View view, List tableQualifier, CascadesContext parentContext) { + Pair viewInfo = parentContext.getStatementContext().getAndCacheViewInfo(tableQualifier, view); + long originalSqlMode = parentContext.getConnectContext().getSessionVariable().getSqlMode(); + parentContext.getConnectContext().getSessionVariable().setSqlMode(viewInfo.second); + try { + return parseAndAnalyzeView(view, viewInfo.first, parentContext); + } finally { + parentContext.getConnectContext().getSessionVariable().setSqlMode(originalSqlMode); + } + } + private Plan parseAndAnalyzeView(TableIf view, String ddlSql, CascadesContext parentContext) { parentContext.getStatementContext().addViewDdlSql(ddlSql); Optional sqlCacheContext = parentContext.getStatementContext().getSqlCacheContext(); @@ -510,7 +487,7 @@ private Plan parseAndAnalyzeView(TableIf view, String ddlSql, CascadesContext pa CascadesContext viewContext = CascadesContext.initContext( parentContext.getStatementContext(), parsedViewPlan, 
PhysicalProperties.ANY); viewContext.keepOrShowPlanProcess(parentContext.showPlanProcess(), () -> { - viewContext.newAnalyzer(customTableResolver).analyze(); + viewContext.newAnalyzer().analyze(); }); parentContext.addPlanProcesses(viewContext.getPlanProcesses()); // we should remove all group expression of the plan which in other memo, so the groupId would not conflict @@ -543,7 +520,4 @@ private List getPartitionIds(TableIf t, UnboundRelation unboundRelation, L return part.getId(); }).collect(ImmutableList.toImmutableList()); } - - /** CustomTableResolver */ - public interface CustomTableResolver extends Function, TableIf> {} } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java new file mode 100644 index 00000000000000..9c6e3adbe74e1b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java @@ -0,0 +1,228 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.rules.analysis; + +import org.apache.doris.catalog.Env; +import org.apache.doris.catalog.MTMV; +import org.apache.doris.catalog.TableIf; +import org.apache.doris.catalog.View; +import org.apache.doris.common.Pair; +import org.apache.doris.mtmv.BaseTableInfo; +import org.apache.doris.nereids.CTEContext; +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.StatementContext.TableFrom; +import org.apache.doris.nereids.analyzer.UnboundRelation; +import org.apache.doris.nereids.analyzer.UnboundResultSink; +import org.apache.doris.nereids.analyzer.UnboundTableSink; +import org.apache.doris.nereids.parser.NereidsParser; +import org.apache.doris.nereids.pattern.MatchingContext; +import org.apache.doris.nereids.properties.PhysicalProperties; +import org.apache.doris.nereids.rules.Rule; +import org.apache.doris.nereids.rules.RuleType; +import org.apache.doris.nereids.trees.expressions.CTEId; +import org.apache.doris.nereids.trees.expressions.SubqueryExpr; +import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.logical.LogicalCTE; +import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; +import org.apache.doris.nereids.trees.plans.logical.LogicalSubQueryAlias; +import org.apache.doris.nereids.util.RelationUtil; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.List; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Rule to bind relations in query plan. 
+ */ +public class CollectRelation implements AnalysisRuleFactory { + + private static final Logger LOG = LogManager.getLogger(CollectRelation.class); + + public CollectRelation() {} + + @Override + public List buildRules() { + return ImmutableList.of( + // should collect table from cte first to fill collect all cte name to avoid collect wrong table. + logicalCTE() + .thenApply(ctx -> { + ctx.cascadesContext.setCteContext(collectFromCte(ctx.root, ctx.cascadesContext)); + return null; + }) + .toRule(RuleType.COLLECT_TABLE_FROM_CTE), + unboundRelation() + .thenApply(this::collectFromUnboundRelation) + .toRule(RuleType.COLLECT_TABLE_FROM_RELATION), + unboundTableSink() + .thenApply(this::collectFromUnboundTableSink) + .toRule(RuleType.COLLECT_TABLE_FROM_SINK), + any().whenNot(UnboundRelation.class::isInstance) + .whenNot(UnboundTableSink.class::isInstance) + .thenApply(this::collectFromAny) + .toRule(RuleType.COLLECT_TABLE_FROM_OTHER) + ); + } + + /** + * register and store CTEs in CTEContext + */ + private CTEContext collectFromCte( + LogicalCTE logicalCTE, CascadesContext cascadesContext) { + CTEContext outerCteCtx = cascadesContext.getCteContext(); + List> aliasQueries = logicalCTE.getAliasQueries(); + for (LogicalSubQueryAlias aliasQuery : aliasQueries) { + // we should use a chain to ensure visible of cte + LogicalPlan parsedCtePlan = (LogicalPlan) aliasQuery.child(); + CascadesContext innerCascadesCtx = CascadesContext.newContextWithCteContext( + cascadesContext, parsedCtePlan, outerCteCtx); + innerCascadesCtx.newTableCollector().collect(); + LogicalPlan analyzedCtePlan = (LogicalPlan) innerCascadesCtx.getRewritePlan(); + // cteId is not used in CollectTable stage + CTEId cteId = new CTEId(0); + LogicalSubQueryAlias logicalSubQueryAlias = + aliasQuery.withChildren(ImmutableList.of(analyzedCtePlan)); + outerCteCtx = new CTEContext(cteId, logicalSubQueryAlias, outerCteCtx); + outerCteCtx.setAnalyzedPlan(logicalSubQueryAlias); + } + return outerCteCtx; + } + + 
private Plan collectFromAny(MatchingContext ctx) { + Set subqueryExprs = ctx.root.getExpressions().stream() + .>map(p -> p.collect(SubqueryExpr.class::isInstance)) + .flatMap(Set::stream) + .collect(Collectors.toSet()); + for (SubqueryExpr subqueryExpr : subqueryExprs) { + CascadesContext subqueryContext = CascadesContext.newContextWithCteContext( + ctx.cascadesContext, subqueryExpr.getQueryPlan(), ctx.cteContext); + subqueryContext.keepOrShowPlanProcess(ctx.cascadesContext.showPlanProcess(), + () -> subqueryContext.newTableCollector().collect()); + ctx.cascadesContext.addPlanProcesses(subqueryContext.getPlanProcesses()); + } + return null; + } + + private Plan collectFromUnboundTableSink(MatchingContext> ctx) { + List nameParts = ctx.root.getNameParts(); + switch (nameParts.size()) { + case 1: + // table + // Use current database name from catalog. + case 2: + // db.table + // Use database name from table name parts. + case 3: + // catalog.db.table + // Use catalog and database name from name parts. + collectFromUnboundRelation(ctx.cascadesContext, nameParts, TableFrom.INSERT_TARGET); + return null; + default: + throw new IllegalStateException("Insert target name is invalid."); + } + } + + private Plan collectFromUnboundRelation(MatchingContext ctx) { + List nameParts = ctx.root.getNameParts(); + switch (nameParts.size()) { + case 1: + // table + // Use current database name from catalog. + case 2: + // db.table + // Use database name from table name parts. + case 3: + // catalog.db.table + // Use catalog and database name from name parts. 
+ collectFromUnboundRelation(ctx.cascadesContext, nameParts, TableFrom.QUERY); + return null; + default: + throw new IllegalStateException("Table name [" + ctx.root.getTableName() + "] is invalid."); + } + } + + private void collectFromUnboundRelation(CascadesContext cascadesContext, + List nameParts, TableFrom tableFrom) { + if (nameParts.size() == 1) { + String tableName = nameParts.get(0); + // check if it is a CTE's name + CTEContext cteContext = cascadesContext.getCteContext().findCTEContext(tableName).orElse(null); + if (cteContext != null) { + Optional analyzedCte = cteContext.getAnalyzedCTEPlan(tableName); + if (analyzedCte.isPresent()) { + return; + } + } + } + List tableQualifier = RelationUtil.getQualifierName(cascadesContext.getConnectContext(), nameParts); + TableIf table = cascadesContext.getConnectContext().getStatementContext() + .getAndCacheTable(tableQualifier, tableFrom); + LOG.info("collect table {} from {}", nameParts, tableFrom); + if (tableFrom == TableFrom.QUERY) { + collectMTMVCandidates(table, cascadesContext); + } + if (table instanceof View) { + parseAndCollectFromView(tableQualifier, (View) table, cascadesContext); + } + } + + private void collectMTMVCandidates(TableIf table, CascadesContext cascadesContext) { + if (cascadesContext.getConnectContext().getSessionVariable().enableMaterializedViewRewrite) { + Set mtmvSet = Env.getCurrentEnv().getMtmvService().getRelationManager() + .getAllMTMVs(Lists.newArrayList(new BaseTableInfo(table))); + LOG.info("table {} related mv set is {}", new BaseTableInfo(table), mtmvSet); + for (MTMV mtmv : mtmvSet) { + cascadesContext.getStatementContext().getMtmvRelatedTables().put(mtmv.getFullQualifiers(), mtmv); + mtmv.readMvLock(); + try { + for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { + LOG.info("mtmv {} related base table include {}", new BaseTableInfo(mtmv), baseTableInfo); + cascadesContext.getStatementContext().getAndCacheTable(baseTableInfo.toList(), TableFrom.MTMV); + } 
+ } finally { + mtmv.readMvUnlock(); + } + } + } + } + + private void parseAndCollectFromView(List tableQualifier, View view, CascadesContext parentContext) { + Pair viewInfo = parentContext.getStatementContext().getAndCacheViewInfo(tableQualifier, view); + long originalSqlMode = parentContext.getConnectContext().getSessionVariable().getSqlMode(); + parentContext.getConnectContext().getSessionVariable().setSqlMode(viewInfo.second); + LogicalPlan parsedViewPlan; + try { + parsedViewPlan = new NereidsParser().parseSingle(viewInfo.first); + } finally { + parentContext.getConnectContext().getSessionVariable().setSqlMode(originalSqlMode); + } + if (parsedViewPlan instanceof UnboundResultSink) { + parsedViewPlan = (LogicalPlan) ((UnboundResultSink) parsedViewPlan).child(); + } + CascadesContext viewContext = CascadesContext.initContext( + parentContext.getStatementContext(), parsedViewPlan, PhysicalProperties.ANY); + viewContext.keepOrShowPlanProcess(parentContext.showPlanProcess(), + () -> viewContext.newTableCollector().collect()); + parentContext.addPlanProcesses(viewContext.getPlanProcesses()); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AsyncMaterializationContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AsyncMaterializationContext.java index 96d37ad546a7b4..593ad986ca797c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AsyncMaterializationContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/AsyncMaterializationContext.java @@ -131,11 +131,6 @@ boolean isFinalChosen(Relation relation) { @Override public Plan getScanPlan(StructInfo queryInfo, CascadesContext cascadesContext) { - // If try to get scan plan or rewrite successfully, try to get mv read lock to avoid meta data inconsistent, - // try to get lock which should added before RBO - if (!this.isSuccess()) { - 
cascadesContext.getStatementContext().addTableReadLock(this.getMtmv()); - } super.getScanPlan(queryInfo, cascadesContext); return scanPlan; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/InitMaterializationContextHook.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/InitMaterializationContextHook.java index 4f8198e0b3c0bd..db270390f9bdd3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/InitMaterializationContextHook.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/InitMaterializationContextHook.java @@ -33,9 +33,6 @@ import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.PlannerHook; import org.apache.doris.nereids.parser.NereidsParser; -import org.apache.doris.nereids.trees.plans.Plan; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector.TableCollectorContext; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; @@ -80,24 +77,11 @@ public void initMaterializationContext(CascadesContext cascadesContext) { */ protected void doInitMaterializationContext(CascadesContext cascadesContext) { if (cascadesContext.getConnectContext().getSessionVariable().isInDebugMode()) { - LOG.info(String.format("MaterializationContext init return because is in debug mode, current queryId is %s", - cascadesContext.getConnectContext().getQueryIdentifier())); + LOG.info("MaterializationContext init return because is in debug mode, current queryId is {}", + cascadesContext.getConnectContext().getQueryIdentifier()); return; } - // Only collect the table or mv which query use directly, to avoid useless mv partition in rewrite - // Keep use one connection context when in query, if new connect context, - // the ConnectionContext.get() will change - TableCollectorContext collectorContext = new 
TableCollectorContext(Sets.newHashSet(), false, - cascadesContext.getConnectContext()); - try { - Plan rewritePlan = cascadesContext.getRewritePlan(); - rewritePlan.accept(TableCollector.INSTANCE, collectorContext); - } catch (Exception e) { - LOG.warn(String.format("MaterializationContext init table collect fail, current queryId is %s", - cascadesContext.getConnectContext().getQueryIdentifier()), e); - return; - } - Set collectedTables = collectorContext.getCollectedTables(); + Set collectedTables = Sets.newHashSet(cascadesContext.getStatementContext().getTables().values()); if (collectedTables.isEmpty()) { return; } @@ -115,7 +99,7 @@ protected void doInitMaterializationContext(CascadesContext cascadesContext) { } // Create async materialization context for (MaterializationContext context : createAsyncMaterializationContext(cascadesContext, - collectorContext.getCollectedTables())) { + collectedTables)) { cascadesContext.addMaterializationContext(context); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/MaterializedViewUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/MaterializedViewUtils.java index 4ddb93409379e9..20aad9ecdb25c6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/MaterializedViewUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/exploration/mv/MaterializedViewUtils.java @@ -128,11 +128,10 @@ public static RelatedTableInfo getRelatedTableInfo(String column, String timeUni materializedViewPlan = new LogicalProject<>(ImmutableList.of(columnExpr), materializedViewPlan); } // Collect table relation map which is used to identify self join - List catalogRelationObjs = - materializedViewPlan.collectToList(CatalogRelation.class::isInstance); + List catalogRelations = materializedViewPlan.collectToList(CatalogRelation.class::isInstance); ImmutableMultimap.Builder tableCatalogRelationMultimapBuilder = ImmutableMultimap.builder(); 
- for (CatalogRelation catalogRelation : catalogRelationObjs) { + for (CatalogRelation catalogRelation : catalogRelations) { tableCatalogRelationMultimapBuilder.put(new TableIdentifier(catalogRelation.getTable()), catalogRelation); } // Check sql pattern @@ -320,6 +319,7 @@ public static MTMVCache createMTMVCache(String querySql, ConnectContext connectC LogicalPlan unboundMvPlan = new NereidsParser().parseSingle(querySql); StatementContext mvSqlStatementContext = new StatementContext(connectContext, new OriginStatement(querySql, 0)); + mvSqlStatementContext.setNeedLockTables(false); NereidsPlanner planner = new NereidsPlanner(mvSqlStatementContext); if (mvSqlStatementContext.getConnectContext().getStatementContext() == null) { mvSqlStatementContext.getConnectContext().setStatementContext(mvSqlStatementContext); @@ -771,7 +771,7 @@ public static final class RelatedTableInfo { private final String column; private final Set failReasons = new HashSet<>(); // This records the partition expression if exist - private Optional partitionExpression; + private final Optional partitionExpression; public RelatedTableInfo(BaseTableInfo tableInfo, boolean pctPossible, String column, String failReason, Expression partitionExpression) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AddConstraintCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AddConstraintCommand.java index 08954741c806aa..f92a3b6103b345 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AddConstraintCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/AddConstraintCommand.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.TableIf; import org.apache.doris.common.Pair; +import org.apache.doris.common.util.MetaLockUtils; import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.exceptions.AnalysisException; import 
org.apache.doris.nereids.properties.PhysicalProperties; @@ -34,9 +35,12 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.Comparator; +import java.util.List; import java.util.Set; /** @@ -61,15 +65,26 @@ public AddConstraintCommand(String name, Constraint constraint) { @Override public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { Pair, TableIf> columnsAndTable = extractColumnsAndTable(ctx, constraint.toProject()); + List tables = Lists.newArrayList(columnsAndTable.second); + Pair, TableIf> referencedColumnsAndTable = null; if (constraint.isForeignKey()) { - Pair, TableIf> referencedColumnsAndTable - = extractColumnsAndTable(ctx, constraint.toReferenceProject()); - columnsAndTable.second.addForeignConstraint(name, columnsAndTable.first, - referencedColumnsAndTable.second, referencedColumnsAndTable.first, false); - } else if (constraint.isPrimaryKey()) { - columnsAndTable.second.addPrimaryKeyConstraint(name, columnsAndTable.first, false); - } else if (constraint.isUnique()) { - columnsAndTable.second.addUniqueConstraint(name, columnsAndTable.first, false); + referencedColumnsAndTable = extractColumnsAndTable(ctx, constraint.toReferenceProject()); + tables.add(referencedColumnsAndTable.second); + } + tables.sort((Comparator.comparing(TableIf::getId))); + MetaLockUtils.writeLockTables(tables); + try { + if (constraint.isForeignKey()) { + Preconditions.checkState(referencedColumnsAndTable != null); + columnsAndTable.second.addForeignConstraint(name, columnsAndTable.first, + referencedColumnsAndTable.second, referencedColumnsAndTable.first, false); + } else if (constraint.isPrimaryKey()) { + columnsAndTable.second.addPrimaryKeyConstraint(name, columnsAndTable.first, false); + } else if (constraint.isUnique()) { + 
columnsAndTable.second.addUniqueConstraint(name, columnsAndTable.first, false); + } + } finally { + MetaLockUtils.writeUnlockTables(tables); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CommandUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CommandUtils.java deleted file mode 100644 index f9b0c3e18d1b2f..00000000000000 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/CommandUtils.java +++ /dev/null @@ -1,49 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.nereids.trees.plans.commands; - -import org.apache.doris.catalog.KeysType; -import org.apache.doris.catalog.OlapTable; -import org.apache.doris.catalog.TableIf; -import org.apache.doris.nereids.exceptions.AnalysisException; -import org.apache.doris.nereids.util.RelationUtil; -import org.apache.doris.qe.ConnectContext; - -import java.util.List; - -/** - * delete from unique key table. - */ -public class CommandUtils { - - /** - * check delete target table should unique key olap table. If ok, return it. 
- */ - public static OlapTable checkAndGetDeleteTargetTable(ConnectContext ctx, List nameParts) { - List qualifiedTableName = RelationUtil.getQualifierName(ctx, nameParts); - TableIf table = RelationUtil.getTable(qualifiedTableName, ctx.getEnv()); - if (!(table instanceof OlapTable)) { - throw new AnalysisException("table must be olapTable in delete command"); - } - OlapTable targetTable = ((OlapTable) table); - if (targetTable.getKeysType() != KeysType.UNIQUE_KEYS) { - throw new AnalysisException("Nereids only support delete command on unique key table now"); - } - return targetTable; - } -} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropConstraintCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropConstraintCommand.java index fe777ea82972fc..63d432121599e0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropConstraintCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropConstraintCommand.java @@ -18,6 +18,9 @@ package org.apache.doris.nereids.trees.plans.commands; import org.apache.doris.catalog.TableIf; +import org.apache.doris.catalog.constraint.Constraint; +import org.apache.doris.catalog.constraint.PrimaryKeyConstraint; +import org.apache.doris.common.util.MetaLockUtils; import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.properties.PhysicalProperties; @@ -30,9 +33,12 @@ import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.StmtExecutor; +import com.google.common.collect.Lists; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.util.Comparator; +import java.util.List; import java.util.Set; /** @@ -56,7 +62,27 @@ public DropConstraintCommand(String name, LogicalPlan plan) { @Override public void run(ConnectContext ctx, StmtExecutor executor) throws Exception 
{ TableIf table = extractTable(ctx, plan); - table.dropConstraint(name, false); + List tables = Lists.newArrayList(table); + table.readLock(); + try { + Constraint constraint = table.getConstraintsMapUnsafe().get(name); + if (constraint == null) { + throw new AnalysisException( + String.format("Unknown constraint %s on table %s.", name, table.getName())); + } + if (constraint instanceof PrimaryKeyConstraint) { + tables.addAll(((PrimaryKeyConstraint) constraint).getForeignTables()); + } + } finally { + table.readUnlock(); + } + tables.sort((Comparator.comparing(TableIf::getId))); + MetaLockUtils.writeLockTables(tables); + try { + table.dropConstraint(name, false); + } finally { + MetaLockUtils.writeUnlockTables(tables); + } } private TableIf extractTable(ConnectContext ctx, LogicalPlan plan) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowConstraintsCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowConstraintsCommand.java index 5c3c16137bfe80..0e5c332058d481 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowConstraintsCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowConstraintsCommand.java @@ -52,12 +52,19 @@ public ShowConstraintsCommand(List nameParts) { public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { TableIf tableIf = RelationUtil.getDbAndTable( RelationUtil.getQualifierName(ctx, nameParts), ctx.getEnv()).value(); - List> res = tableIf.getConstraintsMap().entrySet().stream() - .map(e -> Lists.newArrayList(e.getKey(), - e.getValue().getType().getName(), - e.getValue().toString())) + tableIf.readLock(); + List> res; + try { + res = tableIf.getConstraintsMap().entrySet().stream() + .map(e -> Lists.newArrayList(e.getKey(), + e.getValue().getType().getName(), + e.getValue().toString())) .collect(Collectors.toList()); + } finally { + tableIf.readUnlock(); + } 
executor.handleShowConstraintStmt(res); + } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateMTMVInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateMTMVInfo.java index 417ab807cfe4bb..349379285790fa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateMTMVInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/CreateMTMVInfo.java @@ -250,42 +250,43 @@ private void analyzeProperties() { /** * analyzeQuery */ - public void analyzeQuery(ConnectContext ctx, Map mvProperties) throws Exception { - // create table as select - StatementContext statementContext = ctx.getStatementContext(); - NereidsPlanner planner = new NereidsPlanner(statementContext); - // this is for expression column name infer when not use alias - LogicalSink logicalSink = new UnboundResultSink<>(logicalQuery); - // Should not make table without data to empty relation when analyze the related table, - // so add disable rules - Set tempDisableRules = ctx.getSessionVariable().getDisableNereidsRuleNames(); - ctx.getSessionVariable().setDisableNereidsRules(CreateMTMVInfo.MTMV_PLANER_DISABLE_RULES); - ctx.getStatementContext().invalidCache(SessionVariable.DISABLE_NEREIDS_RULES); - Plan plan; - try { - // must disable constant folding by be, because be constant folding may return wrong type - ctx.getSessionVariable().setVarOnce(SessionVariable.ENABLE_FOLD_CONSTANT_BY_BE, "false"); - plan = planner.planWithLock(logicalSink, PhysicalProperties.ANY, ExplainLevel.ALL_PLAN); - } finally { - // after operate, roll back the disable rules - ctx.getSessionVariable().setDisableNereidsRules(String.join(",", tempDisableRules)); - ctx.getStatementContext().invalidCache(SessionVariable.DISABLE_NEREIDS_RULES); - } - // can not contain VIEW or MTMV - analyzeBaseTables(planner.getAnalyzedPlan()); - // can not contain Random function - 
analyzeExpressions(planner.getAnalyzedPlan(), mvProperties); - // can not contain partition or tablets - boolean containTableQueryOperator = MaterializedViewUtils.containTableQueryOperator(planner.getAnalyzedPlan()); - if (containTableQueryOperator) { - throw new AnalysisException("can not contain invalid expression"); - } - getRelation(planner); - this.mvPartitionInfo = mvPartitionDefinition.analyzeAndTransferToMTMVPartitionInfo(planner, ctx); - this.partitionDesc = generatePartitionDesc(ctx); - getColumns(plan, ctx, mvPartitionInfo.getPartitionCol(), distribution); - analyzeKeys(); + public void analyzeQuery(ConnectContext ctx, Map mvProperties) { + try (StatementContext statementContext = ctx.getStatementContext()) { + NereidsPlanner planner = new NereidsPlanner(statementContext); + // this is for expression column name infer when not use alias + LogicalSink logicalSink = new UnboundResultSink<>(logicalQuery); + // Should not make table without data to empty relation when analyze the related table, + // so add disable rules + Set tempDisableRules = ctx.getSessionVariable().getDisableNereidsRuleNames(); + ctx.getSessionVariable().setDisableNereidsRules(CreateMTMVInfo.MTMV_PLANER_DISABLE_RULES); + statementContext.invalidCache(SessionVariable.DISABLE_NEREIDS_RULES); + Plan plan; + try { + // must disable constant folding by be, because be constant folding may return wrong type + ctx.getSessionVariable().setVarOnce(SessionVariable.ENABLE_FOLD_CONSTANT_BY_BE, "false"); + plan = planner.planWithLock(logicalSink, PhysicalProperties.ANY, ExplainLevel.ALL_PLAN); + } finally { + // after operate, roll back the disable rules + ctx.getSessionVariable().setDisableNereidsRules(String.join(",", tempDisableRules)); + statementContext.invalidCache(SessionVariable.DISABLE_NEREIDS_RULES); + } + // can not contain VIEW or MTMV + analyzeBaseTables(planner.getAnalyzedPlan()); + // can not contain Random function + analyzeExpressions(planner.getAnalyzedPlan(), mvProperties); + // can 
not contain partition or tablets + boolean containTableQueryOperator = MaterializedViewUtils.containTableQueryOperator( + planner.getAnalyzedPlan()); + if (containTableQueryOperator) { + throw new AnalysisException("can not contain invalid expression"); + } + getRelation(Sets.newHashSet(statementContext.getTables().values()), ctx); + this.mvPartitionInfo = mvPartitionDefinition.analyzeAndTransferToMTMVPartitionInfo(planner); + this.partitionDesc = generatePartitionDesc(ctx); + getColumns(plan, ctx, mvPartitionInfo.getPartitionCol(), distribution); + analyzeKeys(); + } } private void analyzeKeys() { @@ -327,8 +328,8 @@ private void analyzeKeys() { } // Should use analyzed plan for collect views and tables - private void getRelation(NereidsPlanner planner) { - this.relation = MTMVPlanUtil.generateMTMVRelation(planner.getAnalyzedPlan(), planner.getConnectContext()); + private void getRelation(Set tables, ConnectContext ctx) { + this.relation = MTMVPlanUtil.generateMTMVRelation(tables, ctx); } private PartitionDesc generatePartitionDesc(ConnectContext ctx) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java index a26a97f7240793..c2e9abd2f0f97c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java @@ -44,7 +44,6 @@ import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.functions.scalar.DateTrunc; import org.apache.doris.nereids.trees.expressions.literal.Literal; -import org.apache.doris.qe.ConnectContext; import com.google.common.collect.Sets; @@ -66,10 +65,9 @@ public class MTMVPartitionDefinition { * analyzeAndTransferToMTMVPartitionInfo * * @param planner planner - * @param 
ctx ctx * @return MTMVPartitionInfo */ - public MTMVPartitionInfo analyzeAndTransferToMTMVPartitionInfo(NereidsPlanner planner, ConnectContext ctx) { + public MTMVPartitionInfo analyzeAndTransferToMTMVPartitionInfo(NereidsPlanner planner) { MTMVPartitionInfo mtmvPartitionInfo = new MTMVPartitionInfo(partitionType); if (this.partitionType == MTMVPartitionType.SELF_MANAGE) { return mtmvPartitionInfo; @@ -77,9 +75,8 @@ public MTMVPartitionInfo analyzeAndTransferToMTMVPartitionInfo(NereidsPlanner pl String partitionColName; String timeUnit; if (this.partitionType == MTMVPartitionType.EXPR) { - String functionName = ((UnboundFunction) functionCallExpression).getName(); - if (functionCallExpression instanceof UnboundFunction - && functionName.equalsIgnoreCase(PARTITION_BY_FUNCTION_NAME)) { + if (functionCallExpression instanceof UnboundFunction && PARTITION_BY_FUNCTION_NAME + .equalsIgnoreCase(((UnboundFunction) functionCallExpression).getName())) { partitionColName = functionCallExpression.getArgument(0) instanceof UnboundSlot ? 
((UnboundSlot) functionCallExpression.getArgument(0)).getName() : null; timeUnit = functionCallExpression.getArguments().get(1).isLiteral() @@ -93,7 +90,7 @@ public MTMVPartitionInfo analyzeAndTransferToMTMVPartitionInfo(NereidsPlanner pl timeUnit = null; } mtmvPartitionInfo.setPartitionCol(partitionColName); - RelatedTableInfo relatedTableInfo = getRelatedTableInfo(planner, ctx, partitionColName, timeUnit); + RelatedTableInfo relatedTableInfo = getRelatedTableInfo(planner, partitionColName, timeUnit); mtmvPartitionInfo.setRelatedCol(relatedTableInfo.getColumn()); mtmvPartitionInfo.setRelatedTable(relatedTableInfo.getTableInfo()); if (relatedTableInfo.getPartitionExpression().isPresent()) { @@ -119,8 +116,7 @@ public MTMVPartitionInfo analyzeAndTransferToMTMVPartitionInfo(NereidsPlanner pl } // Should use rewritten plan without view and subQuery to get related partition table - private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, ConnectContext ctx, - String partitionColName, String timeUnit) { + private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, String partitionColName, String timeUnit) { CascadesContext cascadesContext = planner.getCascadesContext(); RelatedTableInfo relatedTableInfo = MaterializedViewUtils @@ -129,10 +125,10 @@ private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, ConnectCont throw new AnalysisException(String.format("Unable to find a suitable base table for partitioning," + " the fail reason is %s", relatedTableInfo.getFailReason())); } - MTMVRelatedTableIf mtmvBaseRealtedTable = MTMVUtil.getRelatedTable(relatedTableInfo.getTableInfo()); + MTMVRelatedTableIf mtmvBaseRelatedTable = MTMVUtil.getRelatedTable(relatedTableInfo.getTableInfo()); Set partitionColumnNames = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); try { - partitionColumnNames.addAll(mtmvBaseRealtedTable.getPartitionColumnNames(Optional.empty())); + 
partitionColumnNames.addAll(mtmvBaseRelatedTable.getPartitionColumnNames(Optional.empty())); } catch (DdlException e) { throw new AnalysisException(e.getMessage(), e); } @@ -140,7 +136,7 @@ private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, ConnectCont if (!partitionColumnNames.contains(relatedTableInfo.getColumn())) { throw new AnalysisException("error related column: " + relatedTableInfo.getColumn()); } - if (!(mtmvBaseRealtedTable instanceof HMSExternalTable) + if (!(mtmvBaseRelatedTable instanceof HMSExternalTable) && partitionColumnNames.size() != 1) { throw new AnalysisException("only hms table support multi column partition."); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java index 0999c4baa79e3b..10f9947974cdb0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java @@ -25,6 +25,7 @@ import org.apache.doris.common.ErrorCode; import org.apache.doris.common.ErrorReport; import org.apache.doris.common.profile.ProfileManager.ProfileType; +import org.apache.doris.common.util.DebugUtil; import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.datasource.iceberg.IcebergExternalTable; import org.apache.doris.datasource.jdbc.JdbcExternalTable; @@ -51,6 +52,7 @@ import org.apache.doris.nereids.trees.plans.physical.PhysicalSink; import org.apache.doris.nereids.trees.plans.physical.PhysicalUnion; import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.planner.DataSink; import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.ConnectContext.ConnectType; @@ -83,13 +85,14 @@ public 
class InsertIntoTableCommand extends Command implements ForwardWithSync, public static final Logger LOG = LogManager.getLogger(InsertIntoTableCommand.class); + private LogicalPlan originalLogicalQuery; private LogicalPlan logicalQuery; private Optional labelName; /** * When source it's from job scheduler,it will be set. */ private long jobId; - private Optional insertCtx; + private final Optional insertCtx; private final Optional cte; /** @@ -98,7 +101,8 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync, public InsertIntoTableCommand(LogicalPlan logicalQuery, Optional labelName, Optional insertCtx, Optional cte) { super(PlanType.INSERT_INTO_TABLE_COMMAND); - this.logicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.originalLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.logicalQuery = originalLogicalQuery; this.labelName = Objects.requireNonNull(labelName, "labelName should not be null"); this.insertCtx = insertCtx; this.cte = cte; @@ -145,62 +149,95 @@ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor */ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor stmtExecutor, boolean needBeginTransaction) throws Exception { - TableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx); - // check auth - if (!Env.getCurrentEnv().getAccessManager() - .checkTblPriv(ConnectContext.get(), targetTableIf.getDatabase().getCatalog().getName(), - targetTableIf.getDatabase().getFullName(), targetTableIf.getName(), - PrivPredicate.LOAD)) { - ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD", - ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(), - targetTableIf.getDatabase().getFullName() + "." 
+ targetTableIf.getName()); + List qualifiedTargetTableName = InsertUtils.getTargetTableQualified(logicalQuery, ctx); + + AbstractInsertExecutor insertExecutor; + int retryTimes = 0; + while (++retryTimes < Math.max(ctx.getSessionVariable().dmlPlanRetryTimes, 3)) { + TableIf targetTableIf = RelationUtil.getTable(qualifiedTargetTableName, ctx.getEnv()); + // check auth + if (!Env.getCurrentEnv().getAccessManager() + .checkTblPriv(ConnectContext.get(), targetTableIf.getDatabase().getCatalog().getName(), + targetTableIf.getDatabase().getFullName(), targetTableIf.getName(), + PrivPredicate.LOAD)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD", + ConnectContext.get().getQualifiedUser(), ConnectContext.get().getRemoteIP(), + targetTableIf.getDatabase().getFullName() + "." + targetTableIf.getName()); + } + BuildInsertExecutorResult buildResult; + try { + buildResult = initPlanOnce(ctx, stmtExecutor, targetTableIf); + } catch (Throwable e) { + Throwables.throwIfInstanceOf(e, RuntimeException.class); + throw new IllegalStateException(e.getMessage(), e); + } + insertExecutor = buildResult.executor; + if (!needBeginTransaction) { + return insertExecutor; + } + + // lock after plan and check does table's schema changed to ensure we lock table order by id. + TableIf newestTargetTableIf = RelationUtil.getTable(qualifiedTargetTableName, ctx.getEnv()); + newestTargetTableIf.readLock(); + try { + if (targetTableIf.getId() != newestTargetTableIf.getId()) { + LOG.warn("insert plan failed {} times. query id is {}. table id changed from {} to {}", + retryTimes, DebugUtil.printId(ctx.queryId()), + targetTableIf.getId(), newestTargetTableIf.getId()); + continue; + } + if (!targetTableIf.getFullSchema().equals(newestTargetTableIf.getFullSchema())) { + LOG.warn("insert plan failed {} times. query id is {}. 
table schema changed from {} to {}", + retryTimes, DebugUtil.printId(ctx.queryId()), + targetTableIf.getFullSchema(), newestTargetTableIf.getFullSchema()); + continue; + } + if (!insertExecutor.isEmptyInsert()) { + insertExecutor.beginTransaction(); + insertExecutor.finalizeSink( + buildResult.planner.getFragments().get(0), buildResult.dataSink, + buildResult.physicalSink + ); + } + newestTargetTableIf.readUnlock(); + } catch (Throwable e) { + newestTargetTableIf.readUnlock(); + // the abortTxn in onFail need to acquire table write lock + if (insertExecutor != null) { + insertExecutor.onFail(e); + } + Throwables.throwIfInstanceOf(e, RuntimeException.class); + throw new IllegalStateException(e.getMessage(), e); + } + stmtExecutor.setProfileType(ProfileType.LOAD); + // We exposed @StmtExecutor#cancel as a unified entry point for statement interruption, + // so we need to set this here + insertExecutor.getCoordinator().setTxnId(insertExecutor.getTxnId()); + stmtExecutor.setCoord(insertExecutor.getCoordinator()); + // for prepare and execute, avoiding normalization for every execute command + this.originalLogicalQuery = this.logicalQuery; + return insertExecutor; } + LOG.warn("insert plan failed {} times. query id is {}.", retryTimes, DebugUtil.printId(ctx.queryId())); + throw new AnalysisException("Insert plan failed. Could not get target table lock."); + } - AbstractInsertExecutor insertExecutor = null; - // should lock target table until we begin transaction. + private BuildInsertExecutorResult initPlanOnce(ConnectContext ctx, + StmtExecutor stmtExecutor, TableIf targetTableIf) throws Throwable { targetTableIf.readLock(); try { - // 1. 
process inline table (default values, empty values) - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf, insertCtx); + // process inline table (default values, empty values) + this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(originalLogicalQuery, targetTableIf, insertCtx); if (cte.isPresent()) { this.logicalQuery = ((LogicalPlan) cte.get().withChildren(logicalQuery)); } OlapGroupCommitInsertExecutor.analyzeGroupCommit(ctx, targetTableIf, this.logicalQuery, this.insertCtx); - LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); - - BuildInsertExecutorResult buildResult = planInsertExecutor( - ctx, stmtExecutor, logicalPlanAdapter, targetTableIf - ); - - insertExecutor = buildResult.executor; - - if (!needBeginTransaction) { - targetTableIf.readUnlock(); - return insertExecutor; - } - if (!insertExecutor.isEmptyInsert()) { - insertExecutor.beginTransaction(); - insertExecutor.finalizeSink( - buildResult.planner.getFragments().get(0), buildResult.dataSink, buildResult.physicalSink - ); - } - targetTableIf.readUnlock(); - } catch (Throwable e) { + } finally { targetTableIf.readUnlock(); - // the abortTxn in onFail need to acquire table write lock - if (insertExecutor != null) { - insertExecutor.onFail(e); - } - Throwables.propagateIfInstanceOf(e, RuntimeException.class); - throw new IllegalStateException(e.getMessage(), e); } - stmtExecutor.setProfileType(ProfileType.LOAD); - // We exposed @StmtExecutor#cancel as a unified entry point for statement interruption, - // so we need to set this here - insertExecutor.getCoordinator().setTxnId(insertExecutor.getTxnId()); - stmtExecutor.setCoord(insertExecutor.getCoordinator()); - return insertExecutor; + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); + return planInsertExecutor(ctx, stmtExecutor, logicalPlanAdapter, targetTableIf); } // we should select the factory type 
first, but we can not initial InsertExecutor at this time, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java index 60e7e5bf805a64..459ffcd04f894a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java @@ -425,6 +425,14 @@ private static Expression castValue(Expression value, DataType targetType) { * get target table from names. */ public static TableIf getTargetTable(Plan plan, ConnectContext ctx) { + List tableQualifier = getTargetTableQualified(plan, ctx); + return RelationUtil.getTable(tableQualifier, ctx.getEnv()); + } + + /** + * get target table from names. + */ + public static List getTargetTableQualified(Plan plan, ConnectContext ctx) { UnboundLogicalSink unboundTableSink; if (plan instanceof UnboundTableSink) { unboundTableSink = (UnboundTableSink) plan; @@ -439,8 +447,7 @@ public static TableIf getTargetTable(Plan plan, ConnectContext ctx) { + " [UnboundTableSink, UnboundHiveTableSink, UnboundIcebergTableSink]," + " but it is " + plan.getType()); } - List tableQualifier = RelationUtil.getQualifierName(ctx, unboundTableSink.getNameParts()); - return RelationUtil.getDbAndTable(tableQualifier, ctx.getEnv()).second; + return RelationUtil.getQualifierName(ctx, unboundTableSink.getNameParts()); } private static NamedExpression generateDefaultExpression(Column column) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/TableCollector.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/TableCollector.java deleted file mode 100644 index 27ff1e4b68c075..00000000000000 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/TableCollector.java +++ /dev/null @@ -1,122 +0,0 @@ -// 
Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.nereids.trees.plans.visitor; - -import org.apache.doris.catalog.MTMV; -import org.apache.doris.catalog.TableIf; -import org.apache.doris.catalog.TableIf.TableType; -import org.apache.doris.common.AnalysisException; -import org.apache.doris.mtmv.MTMVCache; -import org.apache.doris.nereids.trees.plans.Plan; -import org.apache.doris.nereids.trees.plans.logical.LogicalCatalogRelation; -import org.apache.doris.nereids.trees.plans.physical.PhysicalCatalogRelation; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector.TableCollectorContext; -import org.apache.doris.qe.ConnectContext; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; - -import java.util.HashSet; -import java.util.Set; - -/** - * Collect the table in plan - * Note: will not get table if table is eliminated by EmptyRelation in rewrite. - * View expand is in RBO, if call this method with the plan after RBO, this will get base tables in view, or will not. 
- * Materialized view is extended or not can be controlled by the field expand - */ -public class TableCollector extends DefaultPlanVisitor { - - public static final TableCollector INSTANCE = new TableCollector(); - private static final Logger LOG = LogManager.getLogger(TableCollector.class); - - @Override - public Plan visitLogicalCatalogRelation(LogicalCatalogRelation catalogRelation, TableCollectorContext context) { - TableIf table = catalogRelation.getTable(); - if (context.getTargetTableTypes().isEmpty() || context.getTargetTableTypes().contains(table.getType())) { - context.getCollectedTables().add(table); - } - if (table instanceof MTMV) { - expandMvAndCollect((MTMV) table, context); - } - return catalogRelation; - } - - @Override - public Plan visitPhysicalCatalogRelation(PhysicalCatalogRelation catalogRelation, TableCollectorContext context) { - TableIf table = catalogRelation.getTable(); - if (context.getTargetTableTypes().isEmpty() || context.getTargetTableTypes().contains(table.getType())) { - context.getCollectedTables().add(table); - } - if (table instanceof MTMV) { - expandMvAndCollect((MTMV) table, context); - } - return catalogRelation; - } - - private void expandMvAndCollect(MTMV mtmv, TableCollectorContext context) { - if (!context.isExpandMaterializedView()) { - return; - } - // Make sure use only one connection context when in query to avoid ConnectionContext.get() wrong - MTMVCache expandedMvCache; - try { - expandedMvCache = mtmv.getOrGenerateCache(context.getConnectContext()); - } catch (AnalysisException exception) { - LOG.warn(String.format("expandMvAndCollect getOrGenerateCache fail, mtmv name is %s", mtmv.getName()), - exception); - expandedMvCache = MTMVCache.from(mtmv, context.getConnectContext(), false); - } - expandedMvCache.getAnalyzedPlan().accept(this, context); - } - - /** - * The context for table collecting, it contains the target collect table types - * and the result of collect. 
- */ - public static final class TableCollectorContext { - private final Set collectedTables = new HashSet<>(); - private final Set targetTableTypes; - // if expand the mv or not - private final boolean expandMaterializedView; - private final ConnectContext connectContext; - - public TableCollectorContext(Set targetTableTypes, boolean expandMaterializedView, - ConnectContext connectContext) { - this.targetTableTypes = targetTableTypes; - this.expandMaterializedView = expandMaterializedView; - this.connectContext = connectContext; - } - - public Set getCollectedTables() { - return collectedTables; - } - - public Set getTargetTableTypes() { - return targetTableTypes; - } - - public boolean isExpandMaterializedView() { - return expandMaterializedView; - } - - public ConnectContext getConnectContext() { - return connectContext; - } - } -} diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index 126ed1135e9dc0..75f21c786b8c37 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -141,6 +141,7 @@ public class SessionVariable implements Serializable, Writable { public static final String PARALLEL_PIPELINE_TASK_NUM = "parallel_pipeline_task_num"; public static final String PROFILE_LEVEL = "profile_level"; public static final String MAX_INSTANCE_NUM = "max_instance_num"; + public static final String DML_PLAN_RETRY_TIMES = "DML_PLAN_RETRY_TIMES"; public static final String ENABLE_INSERT_STRICT = "enable_insert_strict"; public static final String INSERT_MAX_FILTER_RATIO = "insert_max_filter_ratio"; @@ -1008,6 +1009,17 @@ public enum IgnoreSplitType { @VariableMgr.VarAttr(name = MAX_INSTANCE_NUM) public int maxInstanceNum = 64; + @VariableMgr.VarAttr(name = DML_PLAN_RETRY_TIMES, needForward = true, description = { + "写入规划的最大重试次数。为了避免死锁,写入规划时采用了分阶段加锁。当在两次加锁中间,表结构发生变更时,会尝试重新规划。" + + 
"此变量限制重新规划的最大尝试次数。", + "Maximum retry attempts for write planning. To avoid deadlocks, " + + "phased locking is adopted during write planning. " + + "When changes occur to the table structure between two locking phases, " + + "re-planning will be attempted. " + + "This variable limits the maximum number of retry attempts for re-planning." + }) + public int dmlPlanRetryTimes = 3; + @VariableMgr.VarAttr(name = ENABLE_INSERT_STRICT, needForward = true) public boolean enableInsertStrict = true; diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java index 5c2566225fe50a..e757f3153db038 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/StmtExecutor.java @@ -237,6 +237,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1357,6 +1358,7 @@ public void analyze(TQueryOptions tQueryOptions) throws UserException, Interrupt } // table id in tableList is in ascending order because that table map is a sorted map List tables = Lists.newArrayList(tableMap.values()); + tables.sort((Comparator.comparing(TableIf::getId))); int analyzeTimes = 2; if (Config.isCloudMode()) { // be core and be restarted, need retry more times @@ -2387,6 +2389,7 @@ private void handleInsertStmt() throws Exception { response.getStatus(), i); if (i < maxRetry) { List tables = Lists.newArrayList(insertStmt.getTargetTable()); + tables.sort((Comparator.comparing(TableIf::getId))); MetaLockUtils.readLockTables(tables); try { insertStmt.reset(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/MetadataGenerator.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/MetadataGenerator.java index 01eb92b9be3f40..5f6c12d8eeb477 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/MetadataGenerator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/MetadataGenerator.java @@ -43,6 +43,7 @@ import org.apache.doris.common.UserException; import org.apache.doris.common.proc.FrontendsProcNode; import org.apache.doris.common.proc.PartitionsProcDir; +import org.apache.doris.common.util.MetaLockUtils; import org.apache.doris.common.util.NetUtils; import org.apache.doris.common.util.TimeUtils; import org.apache.doris.common.util.Util; @@ -62,7 +63,10 @@ import org.apache.doris.job.common.JobType; import org.apache.doris.job.extensions.mtmv.MTMVJob; import org.apache.doris.job.task.AbstractTask; +import org.apache.doris.mtmv.BaseTableInfo; import org.apache.doris.mtmv.MTMVPartitionUtil; +import org.apache.doris.mtmv.MTMVStatus; +import org.apache.doris.mtmv.MTMVUtil; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.plsql.metastore.PlsqlManager; import org.apache.doris.plsql.metastore.PlsqlProcedureKey; @@ -111,6 +115,7 @@ import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Collection; +import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.Map; @@ -844,22 +849,42 @@ private static TFetchSchemaTableDataResult mtmvMetadataResult(TMetadataTableRequ } MTMV mv = (MTMV) table; if (LOG.isDebugEnabled()) { - LOG.debug("mv: " + mv.toInfoString()); + LOG.debug("mv: {}", mv.toInfoString()); } + List needLocked = Lists.newArrayList(); + needLocked.add(mv); + boolean alwaysNotSync = false; + try { + for (BaseTableInfo baseTableInfo : mv.getRelation().getBaseTables()) { + TableIf baseTable = MTMVUtil.getTable(baseTableInfo); + needLocked.add(baseTable); + } + } catch (Exception e) { + alwaysNotSync = true; + } + needLocked.sort(Comparator.comparing(TableIf::getId)); + MetaLockUtils.readLockTables(needLocked); + boolean isSync; + try { + isSync = !alwaysNotSync && MTMVPartitionUtil.isMTMVSync(mv); + } 
finally { + MetaLockUtils.readUnlockTables(needLocked); + } + MTMVStatus mtmvStatus = mv.getStatus(); TRow trow = new TRow(); trow.addToColumnValue(new TCell().setLongVal(mv.getId())); trow.addToColumnValue(new TCell().setStringVal(mv.getName())); trow.addToColumnValue(new TCell().setStringVal(mv.getJobInfo().getJobName())); - trow.addToColumnValue(new TCell().setStringVal(mv.getStatus().getState().name())); - trow.addToColumnValue(new TCell().setStringVal(mv.getStatus().getSchemaChangeDetail())); - trow.addToColumnValue(new TCell().setStringVal(mv.getStatus().getRefreshState().name())); + trow.addToColumnValue(new TCell().setStringVal(mtmvStatus.getState().name())); + trow.addToColumnValue(new TCell().setStringVal(mtmvStatus.getSchemaChangeDetail())); + trow.addToColumnValue(new TCell().setStringVal(mtmvStatus.getRefreshState().name())); trow.addToColumnValue(new TCell().setStringVal(mv.getRefreshInfo().toString())); trow.addToColumnValue(new TCell().setStringVal(mv.getQuerySql())); trow.addToColumnValue(new TCell().setStringVal(mv.getMvProperties().toString())); trow.addToColumnValue(new TCell().setStringVal(mv.getMvPartitionInfo().toNameString())); - trow.addToColumnValue(new TCell().setBoolVal(MTMVPartitionUtil.isMTMVSync(mv))); + trow.addToColumnValue(new TCell().setBoolVal(isSync)); if (LOG.isDebugEnabled()) { - LOG.debug("mvend: " + mv.getName()); + LOG.debug("mv end: {}", mv.getName()); } dataBatch.add(trow); } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/analysis/BindRelationTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/analysis/BindRelationTest.java index 369a57017cba28..eaeaa3b2edda8b 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/analysis/BindRelationTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/analysis/BindRelationTest.java @@ -17,23 +17,13 @@ package org.apache.doris.nereids.rules.analysis; -import org.apache.doris.catalog.Column; -import 
org.apache.doris.catalog.Database; -import org.apache.doris.catalog.DatabaseIf; -import org.apache.doris.catalog.KeysType; -import org.apache.doris.catalog.OlapTable; -import org.apache.doris.catalog.PartitionInfo; -import org.apache.doris.catalog.RandomDistributionInfo; -import org.apache.doris.catalog.Type; import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.pattern.GeneratedPlanPatterns; import org.apache.doris.nereids.rules.RulePromise; -import org.apache.doris.nereids.rules.analysis.BindRelation.CustomTableResolver; import org.apache.doris.nereids.trees.expressions.StatementScopeIdGenerator; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate; import org.apache.doris.nereids.trees.plans.logical.LogicalOlapScan; -import org.apache.doris.nereids.util.PlanChecker; import org.apache.doris.nereids.util.PlanRewriter; import org.apache.doris.utframe.TestWithFeService; @@ -41,9 +31,6 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.List; -import java.util.Optional; - class BindRelationTest extends TestWithFeService implements GeneratedPlanPatterns { private static final String DB1 = "db1"; private static final String DB2 = "db2"; @@ -72,7 +59,7 @@ void bindInCurrentDb() { Plan plan = PlanRewriter.bottomUpRewrite(new UnboundRelation(StatementScopeIdGenerator.newRelationId(), ImmutableList.of("t")), connectContext, new BindRelation()); - Assertions.assertTrue(plan instanceof LogicalOlapScan); + Assertions.assertInstanceOf(LogicalOlapScan.class, plan); Assertions.assertEquals( ImmutableList.of("internal", DEFAULT_CLUSTER_PREFIX + DB1, "t"), ((LogicalOlapScan) plan).qualified()); @@ -84,63 +71,12 @@ void bindByDbQualifier() { Plan plan = PlanRewriter.bottomUpRewrite(new UnboundRelation(StatementScopeIdGenerator.newRelationId(), ImmutableList.of("db1", "t")), connectContext, new BindRelation()); - 
Assertions.assertTrue(plan instanceof LogicalOlapScan); + Assertions.assertInstanceOf(LogicalOlapScan.class, plan); Assertions.assertEquals( ImmutableList.of("internal", DEFAULT_CLUSTER_PREFIX + DB1, "t"), ((LogicalOlapScan) plan).qualified()); } - @Test - public void bindExternalRelation() { - connectContext.setDatabase(DEFAULT_CLUSTER_PREFIX + DB1); - String tableName = "external_table"; - - List externalTableColumns = ImmutableList.of( - new Column("id", Type.INT), - new Column("name", Type.VARCHAR) - ); - - Database externalDatabase = new Database(10000, DEFAULT_CLUSTER_PREFIX + DB1); - - OlapTable externalOlapTable = new OlapTable(1, tableName, externalTableColumns, KeysType.DUP_KEYS, - new PartitionInfo(), new RandomDistributionInfo(10)) { - @Override - public List getBaseSchema(boolean full) { - return externalTableColumns; - } - - @Override - public boolean hasDeleteSign() { - return false; - } - - @Override - public DatabaseIf getDatabase() { - return externalDatabase; - } - }; - - CustomTableResolver customTableResolver = qualifiedTable -> { - if (qualifiedTable.get(2).equals(tableName)) { - return externalOlapTable; - } else { - return null; - } - }; - - PlanChecker.from(connectContext) - .parse("select * from " + tableName + " as et join db1.t on et.id = t.a") - .customAnalyzer(Optional.of(customTableResolver)) // analyze internal relation - .matches( - logicalJoin( - logicalSubQueryAlias( - logicalOlapScan().when(r -> r.getTable() == externalOlapTable) - ), - logicalOlapScan().when(r -> r.getTable().getName().equals("t")) - ) - ); - } - @Test void bindRandomAggTable() { connectContext.setDatabase(DEFAULT_CLUSTER_PREFIX + DB1); @@ -148,7 +84,7 @@ void bindRandomAggTable() { Plan plan = PlanRewriter.bottomUpRewrite(new UnboundRelation(StatementScopeIdGenerator.newRelationId(), ImmutableList.of("tagg")), connectContext, new BindRelation()); - Assertions.assertTrue(plan instanceof LogicalAggregate); + Assertions.assertInstanceOf(LogicalAggregate.class, 
plan); Assertions.assertEquals( ImmutableList.of("internal", DEFAULT_CLUSTER_PREFIX + DB1, "tagg"), plan.getOutput().get(0).getQualifier()); diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/plans/PlanVisitorTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/plans/PlanVisitorTest.java index 0c54f8fad5a107..82c8122a18d72f 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/plans/PlanVisitorTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/plans/PlanVisitorTest.java @@ -17,34 +17,19 @@ package org.apache.doris.nereids.trees.plans; -import org.apache.doris.catalog.TableIf; -import org.apache.doris.catalog.TableIf.TableType; import org.apache.doris.nereids.rules.exploration.mv.MaterializedViewUtils; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.scalar.CurrentDate; import org.apache.doris.nereids.trees.expressions.functions.scalar.CurrentTime; import org.apache.doris.nereids.trees.expressions.functions.scalar.Now; -import org.apache.doris.nereids.trees.expressions.functions.scalar.Random; import org.apache.doris.nereids.trees.expressions.functions.scalar.UnixTimestamp; -import org.apache.doris.nereids.trees.expressions.functions.scalar.Uuid; -import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector; -import org.apache.doris.nereids.trees.plans.visitor.TableCollector.TableCollectorContext; import org.apache.doris.nereids.util.PlanChecker; -import org.apache.doris.qe.SessionVariable; import org.apache.doris.utframe.TestWithFeService; -import com.google.common.collect.Sets; -import mockit.Mock; -import mockit.MockUp; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.BitSet; -import java.util.HashSet; import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; /** * Tests for plan 
visitors to make sure the result meets expectation. @@ -115,154 +100,6 @@ protected void runBeforeAll() throws Exception { + "inner join table3 t3 on t1.c1= t3.c2;"); } - @Test - public void test1() { - PlanChecker.from(connectContext) - .checkPlannerResult("SELECT *, random() FROM table1 " - + "LEFT SEMI JOIN table2 ON table1.c1 = table2.c1 " - + "WHERE table1.c1 IN (SELECT c1 FROM table2) OR table1.c1 < 10", - nereidsPlanner -> { - PhysicalPlan physicalPlan = nereidsPlanner.getPhysicalPlan(); - // Check nondeterministic collect - List nondeterministicFunctionSet = - MaterializedViewUtils.extractNondeterministicFunction(physicalPlan); - Assertions.assertEquals(1, nondeterministicFunctionSet.size()); - Assertions.assertTrue(nondeterministicFunctionSet.get(0) instanceof Random); - // Check get tables - TableCollectorContext collectorContext = new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.OLAP), true, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, collectorContext); - Set expectedTables = new HashSet<>(); - expectedTables.add("table1"); - expectedTables.add("table2"); - Assertions.assertEquals( - collectorContext.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedTables); - }); - } - - @Test - public void test2() { - PlanChecker.from(connectContext) - .checkPlannerResult("SELECT view1.*, uuid() FROM view1 " - + "LEFT SEMI JOIN table2 ON view1.c1 = table2.c1 " - + "WHERE view1.c1 IN (SELECT c1 FROM table2) OR view1.c1 < 10", - nereidsPlanner -> { - PhysicalPlan physicalPlan = nereidsPlanner.getPhysicalPlan(); - // Check nondeterministic collect - List nondeterministicFunctionSet = - MaterializedViewUtils.extractNondeterministicFunction(physicalPlan); - Assertions.assertEquals(2, nondeterministicFunctionSet.size()); - Assertions.assertTrue(nondeterministicFunctionSet.get(0) instanceof Uuid); - Assertions.assertTrue(nondeterministicFunctionSet.get(1) instanceof Random); - // Check get 
tables - TableCollectorContext collectorContext = new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.OLAP), true, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, collectorContext); - Set expectedTables = new HashSet<>(); - expectedTables.add("table1"); - expectedTables.add("table2"); - Assertions.assertEquals( - collectorContext.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedTables); - }); - } - - @Test - public void test3() throws Exception { - connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION"); - BitSet disableNereidsRules = connectContext.getSessionVariable().getDisableNereidsRules(); - new MockUp() { - @Mock - public BitSet getDisableNereidsRules() { - return disableNereidsRules; - } - }; - PlanChecker.from(connectContext) - .checkPlannerResult("SELECT mv1.*, uuid() FROM mv1 " - + "INNER JOIN view1 on mv1.c1 = view1.c2 " - + "LEFT SEMI JOIN table2 ON mv1.c1 = table2.c1 " - + "WHERE mv1.c1 IN (SELECT c1 FROM table2) OR mv1.c1 < 10", - nereidsPlanner -> { - PhysicalPlan physicalPlan = nereidsPlanner.getPhysicalPlan(); - // Check nondeterministic collect - List nondeterministicFunctionSet = - MaterializedViewUtils.extractNondeterministicFunction(physicalPlan); - Assertions.assertEquals(1, nondeterministicFunctionSet.size()); - Assertions.assertTrue(nondeterministicFunctionSet.get(0) instanceof Uuid); - // Check get tables - TableCollectorContext collectorContext = new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.OLAP), true, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, collectorContext); - Set expectedTables = new HashSet<>(); - expectedTables.add("table1"); - expectedTables.add("table2"); - expectedTables.add("table3"); - Assertions.assertEquals( - collectorContext.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedTables); - - TableCollectorContext 
collectorContextWithNoExpand = - new TableCollector.TableCollectorContext(Sets.newHashSet(TableType.OLAP), - false, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, collectorContextWithNoExpand); - Set expectedTablesWithNoExpand = new HashSet<>(); - expectedTablesWithNoExpand.add("table1"); - expectedTablesWithNoExpand.add("table2"); - Assertions.assertEquals( - collectorContextWithNoExpand.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedTablesWithNoExpand); - - TableCollectorContext mvCollectorContext = new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.MATERIALIZED_VIEW), true, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, mvCollectorContext); - Set expectedMvs = new HashSet<>(); - expectedMvs.add("mv1"); - Assertions.assertEquals( - mvCollectorContext.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedMvs); - - TableCollectorContext mvCollectorContextWithNoExpand = - new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.MATERIALIZED_VIEW), false, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, mvCollectorContextWithNoExpand); - Set expectedMvsWithNoExpand = new HashSet<>(); - expectedMvsWithNoExpand.add("mv1"); - Assertions.assertEquals( - mvCollectorContextWithNoExpand.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedMvsWithNoExpand); - - TableCollectorContext allTableTypeWithExpand = - new TableCollector.TableCollectorContext( - Sets.newHashSet(TableType.values()), true, connectContext); - physicalPlan.accept(TableCollector.INSTANCE, allTableTypeWithExpand); - // when collect in plan with expand, should collect table which is expended - Set expectedTablesWithExpand = new HashSet<>(); - expectedTablesWithExpand.add("mv1"); - expectedTablesWithExpand.add("table1"); - expectedTablesWithExpand.add("table2"); - 
expectedTablesWithExpand.add("table3"); - Assertions.assertEquals( - allTableTypeWithExpand.getCollectedTables().stream() - .map(TableIf::getName) - .collect(Collectors.toSet()), - expectedTablesWithExpand); - }); - dropMvByNereids("drop materialized view mv1"); - } - @Test public void testTimeFunction() { PlanChecker.from(connectContext) diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/util/PlanChecker.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/util/PlanChecker.java index f0a45d1e7bc852..77ecbd5dc7c4dd 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/util/PlanChecker.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/util/PlanChecker.java @@ -49,7 +49,6 @@ import org.apache.doris.nereids.rules.RuleFactory; import org.apache.doris.nereids.rules.RuleSet; import org.apache.doris.nereids.rules.RuleType; -import org.apache.doris.nereids.rules.analysis.BindRelation.CustomTableResolver; import org.apache.doris.nereids.rules.exploration.mv.InitMaterializationContextHook; import org.apache.doris.nereids.rules.rewrite.OneRewriteRuleFactory; import org.apache.doris.nereids.trees.plans.GroupPlan; @@ -71,7 +70,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Optional; import java.util.Set; import java.util.function.Consumer; import java.util.function.Supplier; @@ -147,12 +145,6 @@ public PlanChecker analyze(String sql) { return this; } - public PlanChecker customAnalyzer(Optional customTableResolver) { - this.cascadesContext.newAnalyzer(customTableResolver).analyze(); - this.cascadesContext.toMemo(); - return this; - } - public PlanChecker customRewrite(CustomRewriter customRewriter) { Rewriter.getWholeTreeRewriterWithCustomJobs(cascadesContext, ImmutableList.of(Rewriter.custom(RuleType.TEST_REWRITE, () -> customRewriter))) @@ -277,7 +269,7 @@ public NereidsPlanner plan(String sql) { LogicalPlan parsedPlan = new NereidsParser().parseSingle(sql); LogicalPlanAdapter parsedPlanAdaptor = new 
LogicalPlanAdapter(parsedPlan, statementContext); statementContext.setParsedStatement(parsedPlanAdaptor); - planner.planWithLock(parsedPlanAdaptor); + planner.plan(parsedPlanAdaptor); return planner; } @@ -576,7 +568,7 @@ public PlanChecker checkExplain(String sql, Consumer consumer) { new StatementContext(connectContext, new OriginStatement(sql, 0))); LogicalPlanAdapter adapter = LogicalPlanAdapter.of(parsed); adapter.setIsExplain(new ExplainOptions(ExplainLevel.ALL_PLAN, false)); - nereidsPlanner.planWithLock(adapter); + nereidsPlanner.plan(adapter); consumer.accept(nereidsPlanner); return this; } @@ -585,7 +577,7 @@ public PlanChecker checkPlannerResult(String sql, Consumer consu LogicalPlan parsed = new NereidsParser().parseSingle(sql); NereidsPlanner nereidsPlanner = new NereidsPlanner( new StatementContext(connectContext, new OriginStatement(sql, 0))); - nereidsPlanner.planWithLock(LogicalPlanAdapter.of(parsed)); + nereidsPlanner.plan(LogicalPlanAdapter.of(parsed)); consumer.accept(nereidsPlanner); return this; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/util/ReadLockTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/util/ReadLockTest.java index 1e1535a573610b..6cd85183b3df79 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/util/ReadLockTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/util/ReadLockTest.java @@ -118,7 +118,7 @@ public void testScalarSubQuery() { } @Test - public void testInserInto() { + public void testInsertInto() { String sql = "INSERT INTO supplier(s_suppkey, s_name, s_address, s_city, s_nation, s_region, s_phone) " + "SELECT lo_orderkey, '', '', '', '', '', '' FROM lineorder"; StatementContext statementContext = MemoTestUtils.createStatementContext(connectContext, sql); @@ -129,7 +129,6 @@ public void testInserInto() { PhysicalProperties.ANY ); Map, TableIf> f = statementContext.getTables(); - // when table in insert would not be added to statement context, but be lock when 
insert Assertions.assertEquals(1, f.size()); Set tableNames = new HashSet<>(); for (Map.Entry, TableIf> entry : f.entrySet()) { @@ -137,5 +136,13 @@ public void testInserInto() { tableNames.add(table.getName()); } Assertions.assertTrue(tableNames.contains("lineorder")); + f = statementContext.getInsertTargetTables(); + Assertions.assertEquals(1, f.size()); + tableNames = new HashSet<>(); + for (Map.Entry, TableIf> entry : f.entrySet()) { + TableIf table = entry.getValue(); + tableNames.add(table.getName()); + } + Assertions.assertTrue(tableNames.contains("supplier")); } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java index 3c793cfc72090d..e8b545f3ffe2c1 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/qe/OlapQueryCacheTest.java @@ -502,7 +502,7 @@ private StatementBase parseSqlByNereids(String sql) { ctx.setStatementContext(statementContext); NereidsPlanner nereidsPlanner = new NereidsPlanner(statementContext); LogicalPlanAdapter adapter = new LogicalPlanAdapter(plan, statementContext); - nereidsPlanner.planWithLock(adapter); + nereidsPlanner.plan(adapter); statementContext.setParsedStatement(adapter); stmt = adapter; } catch (Throwable throwable) { From b1ccd3696a906c2d295b5af5e21665674d56a96a Mon Sep 17 00:00:00 2001 From: Xinyi Zou Date: Thu, 19 Dec 2024 19:56:19 +0800 Subject: [PATCH 12/82] [fix](memory) Fix adjust cache capacity (#45603) ### What problem does this PR solve? If the cache capacity adjustment is not completed within 500ms (conf::memory_gc_sleep_time_ms), the next adjustment will be skipped. In some scenarios, after Memory GC adjusts the cache capacity to 0, the next adjustment to restore the cache capacity is skipped, the cache capacity will remain at 0 for a long time. 
--- be/src/common/daemon.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/be/src/common/daemon.cpp b/be/src/common/daemon.cpp index 12bf1749a5694d..2aaa58f4feb597 100644 --- a/be/src/common/daemon.cpp +++ b/be/src/common/daemon.cpp @@ -230,6 +230,11 @@ void refresh_memory_state_after_memory_change() { } void refresh_cache_capacity() { + if (doris::GlobalMemoryArbitrator::cache_adjust_capacity_notify.load( + std::memory_order_relaxed)) { + // the last cache capacity adjustment has not been completed. + return; + } if (refresh_cache_capacity_sleep_time_ms <= 0) { auto cache_capacity_reduce_mem_limit = int64_t( doris::MemInfo::soft_mem_limit() * config::cache_capacity_reduce_mem_limit_frac); @@ -247,6 +252,8 @@ void refresh_cache_capacity() { new_cache_capacity_adjust_weighted; doris::GlobalMemoryArbitrator::notify_cache_adjust_capacity(); refresh_cache_capacity_sleep_time_ms = config::memory_gc_sleep_time_ms; + } else { + refresh_cache_capacity_sleep_time_ms = 0; } } refresh_cache_capacity_sleep_time_ms -= config::memory_maintenance_sleep_time_ms; From 6cce4087f94b5ef91b7ffcba294b4c5da79f1f14 Mon Sep 17 00:00:00 2001 From: Xinyi Zou Date: Thu, 19 Dec 2024 19:56:38 +0800 Subject: [PATCH 13/82] [fix](memory) Process available memory to increase the Jemalloc cache (#45621) ### What problem does this PR solve? Currently, when the Doris BE process exceed memory limit, Jemalloc cache will be manually released. Add the Jemalloc cache to the available memory of the BE process is expected to have little impact on the risk of the process OOM killer. the process memory used has already subtracted the Jemalloc cache. 
Not merge to 2.1 because 2.1 is stable now --- be/src/runtime/memory/global_memory_arbitrator.h | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/be/src/runtime/memory/global_memory_arbitrator.h b/be/src/runtime/memory/global_memory_arbitrator.h index 075113088fbc5b..a7a85725ab10c9 100644 --- a/be/src/runtime/memory/global_memory_arbitrator.h +++ b/be/src/runtime/memory/global_memory_arbitrator.h @@ -76,7 +76,7 @@ class GlobalMemoryArbitrator { static inline int64_t sys_mem_available() { return MemInfo::_s_sys_mem_available.load(std::memory_order_relaxed) - refresh_interval_memory_growth.load(std::memory_order_relaxed) - - process_reserved_memory(); + process_reserved_memory() + static_cast(MemInfo::allocator_cache_mem()); } static inline std::string sys_mem_available_str() { @@ -91,12 +91,14 @@ class GlobalMemoryArbitrator { static inline std::string sys_mem_available_details_str() { auto msg = fmt::format( "sys available memory {}(= {}[proc/available] - {}[reserved] - " - "{}B[waiting_refresh])", + "{}B[waiting_refresh] + {}[tc/jemalloc_cache])", PrettyPrinter::print(sys_mem_available(), TUnit::BYTES), PrettyPrinter::print(MemInfo::_s_sys_mem_available.load(std::memory_order_relaxed), TUnit::BYTES), PrettyPrinter::print(process_reserved_memory(), TUnit::BYTES), - refresh_interval_memory_growth); + refresh_interval_memory_growth, + PrettyPrinter::print(static_cast(MemInfo::allocator_cache_mem()), + TUnit::BYTES)); #ifdef ADDRESS_SANITIZER msg = "[ASAN]" + msg; #endif From 55c26e03e5f76fe80e6bedd2bc31760e55cd6707 Mon Sep 17 00:00:00 2001 From: linrrarity <142187136+linrrzqqq@users.noreply.github.com> Date: Thu, 19 Dec 2024 20:04:15 +0800 Subject: [PATCH 14/82] [Enhancement](Log) Reduce usage of log fatal(PART I) (#42344) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Proposed changes Issue Number: close #40835 use `throw Exception` to replace them which not in `if constexpr`, and change part of 
`INTERNAL_ERROR` in this [pr](https://github.com/apache/doris/pull/38144/files)(file `aggregate_function_reader_first_last.h` and `aggregate_function_window.h`) to `FatalError`. for those in `if constexpr else{...}`, use `static_assert` about template argument which used in that judgement to advance them to compile time but there seems to be some bugs with the template parameter instantiation in the files `comparison_predicate.h`, `set_probe_sink_operator.cpp`, `set_sink_operator.cpp`, `comparison_predicate.h`, `in_list_predicate.h` and `set_source_operator.cpp` that I haven't modified yet. --------- Co-authored-by: wyxxxcat <1520358997@qq.com> --- be/src/common/status.h | 11 +++- be/src/gutil/strings/escaping.cc | 5 +- be/src/gutil/strings/numbers.cc | 6 +- be/src/gutil/strings/util.cc | 5 +- .../threading/thread_collision_warner.cc | 8 ++- be/src/io/file_factory.h | 5 +- be/src/olap/block_column_predicate.h | 12 ++-- be/src/olap/data_dir.cpp | 16 +++--- be/src/olap/key_coder.h | 13 ++--- be/src/olap/like_column_predicate.h | 8 +-- be/src/olap/match_predicate.h | 3 +- be/src/olap/null_predicate.h | 4 +- be/src/olap/olap_common.h | 6 +- be/src/olap/page_cache.h | 6 +- be/src/olap/rowset/beta_rowset_writer_v2.h | 3 +- be/src/olap/rowset/rowset_writer.h | 4 +- .../segment_v2/hierarchical_data_reader.cpp | 6 +- be/src/olap/storage_policy.cpp | 6 +- be/src/olap/tablet_reader.cpp | 2 +- be/src/pipeline/dependency.h | 3 +- be/src/pipeline/exec/exchange_sink_buffer.cpp | 3 +- be/src/pipeline/exec/exchange_sink_buffer.h | 5 +- be/src/pipeline/exec/hashjoin_build_sink.cpp | 4 +- be/src/pipeline/exec/operator.cpp | 3 +- be/src/pipeline/exec/operator.h | 6 +- be/src/runtime/exec_env_init.cpp | 10 ++-- be/src/runtime/jsonb_value.h | 33 ++++------- be/src/runtime/memory/cache_manager.h | 3 +- be/src/runtime/memory/cache_policy.h | 6 +- be/src/runtime/memory/lru_cache_policy.h | 3 +- .../runtime/memory/thread_mem_tracker_mgr.h | 4 +- be/src/runtime/snapshot_loader.cpp | 5 +- 
.../stream_load/stream_load_executor.cpp | 3 +- be/src/runtime/thread_context.h | 7 +-- be/src/util/binary_cast.hpp | 3 +- be/src/util/bit_util.h | 4 +- be/src/util/bitmap_value.h | 3 +- be/src/util/block_compression.cpp | 3 +- be/src/util/easy_json.cc | 5 +- be/src/util/jsonb_utils.h | 4 +- be/src/util/rle_encoding.h | 2 +- be/src/util/threadpool.cpp | 11 ++-- be/src/util/timezone_utils.cpp | 4 +- .../aggregate_function_map.h | 5 +- .../aggregate_function_reader_first_last.h | 17 ++---- .../aggregate_function_window.h | 19 +++---- be/src/vec/columns/column_string.cpp | 10 ++-- be/src/vec/common/assert_cast.h | 19 +++---- .../vec/common/hash_table/string_hash_table.h | 3 +- be/src/vec/common/schema_util.cpp | 5 +- be/src/vec/core/block.cpp | 4 +- be/src/vec/core/decimal_comparison.h | 23 +++----- be/src/vec/core/field.h | 56 ++++++++----------- be/src/vec/core/types.h | 3 +- .../vec/data_types/data_type_number_base.cpp | 3 +- be/src/vec/data_types/data_type_number_base.h | 3 +- be/src/vec/data_types/serde/data_type_serde.h | 5 +- .../exec/format/parquet/bool_rle_decoder.cpp | 9 +-- be/src/vec/exec/format/parquet/decoder.h | 4 +- .../format/parquet/delta_bit_pack_decoder.h | 10 ++-- .../format/parquet/parquet_column_convert.h | 9 +-- .../format/parquet/vparquet_column_reader.h | 4 +- be/src/vec/exec/jni_connector.cpp | 4 +- be/src/vec/exec/scan/split_source_connector.h | 6 +- be/src/vec/exprs/vexpr.h | 12 ++-- .../functions/array/function_array_apply.cpp | 4 +- be/src/vec/functions/function_cast.h | 12 +++- be/src/vec/json/simd_json_parser.h | 4 +- be/src/vec/olap/olap_data_convertor.h | 6 +- be/src/vec/runtime/vdatetime_value.cpp | 6 +- be/test/util/threadpool_test.cpp | 1 + 71 files changed, 250 insertions(+), 282 deletions(-) diff --git a/be/src/common/status.h b/be/src/common/status.h index d059f289402cea..0252ec8564feeb 100644 --- a/be/src/common/status.h +++ b/be/src/common/status.h @@ -293,7 +293,8 @@ namespace ErrorCode { E(ENTRY_NOT_FOUND, -7002, false); \ 
E(INVALID_TABLET_STATE, -7211, false); \ E(ROWSETS_EXPIRED, -7311, false); \ - E(CGROUP_ERROR, -7411, false); + E(CGROUP_ERROR, -7411, false); \ + E(FATAL_ERROR, -7412, false); // Define constexpr int error_code_name = error_code_value #define M(NAME, ERRORCODE, ENABLESTACKTRACE) constexpr int NAME = ERRORCODE; @@ -446,6 +447,14 @@ class [[nodiscard]] Status { static Status OK() { return {}; } + template + static Status FatalError(std::string_view msg, Args&&... args) { +#ifndef NDEBUG + LOG(FATAL) << fmt::format(msg, std::forward(args)...); +#endif + return Error(msg, std::forward(args)...); + } + // default have stacktrace. could disable manually. #define ERROR_CTOR(name, code) \ template \ diff --git a/be/src/gutil/strings/escaping.cc b/be/src/gutil/strings/escaping.cc index 2ff59104f6d5ce..c6ba8e2f9c375e 100644 --- a/be/src/gutil/strings/escaping.cc +++ b/be/src/gutil/strings/escaping.cc @@ -10,6 +10,8 @@ #include #include +#include "common/exception.h" + using std::numeric_limits; #include @@ -1084,7 +1086,8 @@ int Base64UnescapeInternal(const char* src, int szsrc, char* dest, int szdest, default: // state should have no other values at this point. 
- LOG(FATAL) << "This can't happen; base64 decoder state = " << state; + throw doris::Exception( + doris::Status::FatalError("This can't happen; base64 decoder state = {}", state)); } // The remainder of the string should be all whitespace, mixed with diff --git a/be/src/gutil/strings/numbers.cc b/be/src/gutil/strings/numbers.cc index f471bf31bd08bb..f044ea08d31551 100644 --- a/be/src/gutil/strings/numbers.cc +++ b/be/src/gutil/strings/numbers.cc @@ -19,6 +19,8 @@ #include #include +#include "common/exception.h" + using std::numeric_limits; #include @@ -772,8 +774,8 @@ uint64 atoi_kmgt(const char* s) { scale = GG_ULONGLONG(1) << 40; break; default: - LOG(FATAL) << "Invalid mnemonic: `" << c << "';" - << " should be one of `K', `M', `G', and `T'."; + throw doris::Exception(doris::Status::FatalError( + "Invalid mnemonic: `{}'; should be one of `K', `M', `G', and `T'.", c)); } } return n * scale; diff --git a/be/src/gutil/strings/util.cc b/be/src/gutil/strings/util.cc index 80d5d463430c77..37c09d63b24fff 100644 --- a/be/src/gutil/strings/util.cc +++ b/be/src/gutil/strings/util.cc @@ -19,6 +19,8 @@ #include #include +#include "common/exception.h" + using std::copy; using std::max; using std::min; @@ -489,8 +491,7 @@ const char* strstr_delimited(const char* haystack, const char* needle, char deli ++haystack; } } - LOG(FATAL) << "Unreachable statement"; - return nullptr; + throw doris::Exception(doris::Status::FatalError("Unreachable statement")); } // ---------------------------------------------------------------------- diff --git a/be/src/gutil/threading/thread_collision_warner.cc b/be/src/gutil/threading/thread_collision_warner.cc index d2f1e47f8e02d9..fd51a9195d629e 100644 --- a/be/src/gutil/threading/thread_collision_warner.cc +++ b/be/src/gutil/threading/thread_collision_warner.cc @@ -4,6 +4,9 @@ #include "gutil/threading/thread_collision_warner.h" +#include "common/exception.h" +#include "common/status.h" + #ifdef __linux__ #include #else @@ -19,8 +22,9 @@ 
namespace base { void DCheckAsserter::warn(int64_t previous_thread_id, int64_t current_thread_id) { - LOG(FATAL) << "Thread Collision! Previous thread id: " << previous_thread_id - << ", current thread id: " << current_thread_id; + throw doris::Exception(doris::Status::FatalError( + "Thread Collision! Previous thread id: {}, current thread id: {}", previous_thread_id, + current_thread_id)); } static subtle::Atomic64 CurrentThread() { diff --git a/be/src/io/file_factory.h b/be/src/io/file_factory.h index 9d9d714812ffe9..afa54e221664c9 100644 --- a/be/src/io/file_factory.h +++ b/be/src/io/file_factory.h @@ -118,10 +118,9 @@ class FileFactory { case TStorageBackendType::HDFS: return TFileType::FILE_HDFS; default: - LOG(FATAL) << "not match type to convert, from type:" << type; + throw Exception(Status::FatalError("not match type to convert, from type:{}", type)); } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } }; diff --git a/be/src/olap/block_column_predicate.h b/be/src/olap/block_column_predicate.h index eed5e18329acf7..b6ff115c34c72d 100644 --- a/be/src/olap/block_column_predicate.h +++ b/be/src/olap/block_column_predicate.h @@ -74,25 +74,21 @@ class BlockColumnPredicate { } virtual bool can_do_apply_safely(PrimitiveType input_type, bool is_null) const { - LOG(FATAL) << "should not reach here"; - return true; + throw Exception(Status::FatalError("should not reach here")); } virtual bool support_zonemap() const { return true; } virtual bool evaluate_and(const std::pair& statistic) const { - LOG(FATAL) << "should not reach here"; - return true; + throw Exception(Status::FatalError("should not reach here")); } virtual bool evaluate_and(const segment_v2::BloomFilter* bf) const { - LOG(FATAL) << "should not reach here"; - return true; + throw Exception(Status::FatalError("should not reach here")); } virtual bool evaluate_and(const StringRef* dict_words, const size_t dict_num) const { 
- LOG(FATAL) << "should not reach here"; - return true; + throw Exception(Status::FatalError("should not reach here")); } virtual bool can_do_bloom_filter(bool ngram) const { return false; } diff --git a/be/src/olap/data_dir.cpp b/be/src/olap/data_dir.cpp index 4070bd1dd4340e..4aa215e0c2eb16 100644 --- a/be/src/olap/data_dir.cpp +++ b/be/src/olap/data_dir.cpp @@ -316,10 +316,10 @@ Status DataDir::_check_incompatible_old_format_tablet() { std::string_view value) -> bool { // if strict check incompatible old format, then log fatal if (config::storage_strict_check_incompatible_old_format) { - LOG(FATAL) - << "There are incompatible old format metas, current version does not support " - << "and it may lead to data missing!!! " - << "tablet_id = " << tablet_id << " schema_hash = " << schema_hash; + throw Exception(Status::FatalError( + "There are incompatible old format metas, current version does not support and " + "it may lead to data missing!!! tablet_id = {} schema_hash = {}", + tablet_id, schema_hash)); } else { LOG(WARNING) << "There are incompatible old format metas, current version does not support " @@ -451,7 +451,8 @@ Status DataDir::load() { << ", loaded tablet: " << tablet_ids.size() << ", error tablet: " << failed_tablet_ids.size() << ", path: " << _path; if (!config::ignore_load_tablet_failure) { - LOG(FATAL) << "load tablets encounter failure. stop BE process. path: " << _path; + throw Exception(Status::FatalError( + "load tablets encounter failure. stop BE process. 
path: {}", _path)); } } if (!load_tablet_status) { @@ -495,10 +496,9 @@ Status DataDir::load() { } } if (rowset_partition_id_eq_0_num > config::ignore_invalid_partition_id_rowset_num) { - LOG(FATAL) << fmt::format( + throw Exception(Status::FatalError( "roswet partition id eq 0 is {} bigger than config {}, be exit, plz check be.INFO", - rowset_partition_id_eq_0_num, config::ignore_invalid_partition_id_rowset_num); - exit(-1); + rowset_partition_id_eq_0_num, config::ignore_invalid_partition_id_rowset_num)); } // traverse rowset diff --git a/be/src/olap/key_coder.h b/be/src/olap/key_coder.h index 6885a0d96f251b..549ac53656b647 100644 --- a/be/src/olap/key_coder.h +++ b/be/src/olap/key_coder.h @@ -109,8 +109,8 @@ class KeyCoderTraits< case 16: return BigEndian::FromHost128(val); default: - LOG(FATAL) << "Invalid type to big endian, type=" << int(field_type) - << ", size=" << sizeof(UnsignedCppType); + throw Exception(Status::FatalError("Invalid type to big endian, type={}, size={}", + int(field_type), sizeof(UnsignedCppType))); } } } @@ -300,8 +300,7 @@ class KeyCoderTraits { } static Status decode_ascending(Slice* encoded_key, size_t index_size, uint8_t* cell_ptr) { - LOG(FATAL) << "decode_ascending is not implemented"; - return Status::OK(); + throw Exception(Status::FatalError("decode_ascending is not implemented")); } }; @@ -320,8 +319,7 @@ class KeyCoderTraits { } static Status decode_ascending(Slice* encoded_key, size_t index_size, uint8_t* cell_ptr) { - LOG(FATAL) << "decode_ascending is not implemented"; - return Status::OK(); + throw Exception(Status::FatalError("decode_ascending is not implemented")); } }; @@ -340,8 +338,7 @@ class KeyCoderTraits { } static Status decode_ascending(Slice* encoded_key, size_t index_size, uint8_t* cell_ptr) { - LOG(FATAL) << "decode_ascending is not implemented"; - return Status::OK(); + throw Exception(Status::FatalError("decode_ascending is not implemented")); } }; diff --git a/be/src/olap/like_column_predicate.h 
b/be/src/olap/like_column_predicate.h index 31763d45f7edc7..e0d185c7bd3e98 100644 --- a/be/src/olap/like_column_predicate.h +++ b/be/src/olap/like_column_predicate.h @@ -128,8 +128,8 @@ class LikeColumnPredicate : public ColumnPredicate { } } } else { - LOG(FATAL) << "vectorized (not) like predicates should be dict column"; - __builtin_unreachable(); + throw Exception(Status::FatalError( + "vectorized (not) like predicates should be dict column")); } } else { if (column.is_column_dictionary()) { @@ -153,8 +153,8 @@ class LikeColumnPredicate : public ColumnPredicate { } } } else { - LOG(FATAL) << "vectorized (not) like predicates should be dict column"; - __builtin_unreachable(); + throw Exception(Status::FatalError( + "vectorized (not) like predicates should be dict column")); } } } diff --git a/be/src/olap/match_predicate.h b/be/src/olap/match_predicate.h index ad202b7b2427cf..3ff1775fd8882a 100644 --- a/be/src/olap/match_predicate.h +++ b/be/src/olap/match_predicate.h @@ -55,8 +55,7 @@ class MatchPredicate : public ColumnPredicate { //evaluate predicate on Bitmap Status evaluate(BitmapIndexIterator* iterator, uint32_t num_rows, roaring::Roaring* roaring) const override { - LOG(FATAL) << "Not Implemented MatchPredicate::evaluate"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Not Implemented MatchPredicate::evaluate")); } //evaluate predicate on inverted diff --git a/be/src/olap/null_predicate.h b/be/src/olap/null_predicate.h index 59480264b46103..8e3fef1ff27695 100644 --- a/be/src/olap/null_predicate.h +++ b/be/src/olap/null_predicate.h @@ -87,8 +87,8 @@ class NullPredicate : public ColumnPredicate { if (_is_null) { return bf->test_bytes(nullptr, 0); } else { - LOG(FATAL) << "Bloom filter is not supported by predicate type: is_null=" << _is_null; - return true; + throw Exception(Status::FatalError( + "Bloom filter is not supported by predicate type: is_null=")); } } diff --git a/be/src/olap/olap_common.h b/be/src/olap/olap_common.h index 
11249bafb1e3c0..3b892e5d360e54 100644 --- a/be/src/olap/olap_common.h +++ b/be/src/olap/olap_common.h @@ -36,6 +36,7 @@ #include #include "common/config.h" +#include "common/exception.h" #include "io/io_common.h" #include "olap/olap_define.h" #include "olap/rowset/rowset_fwd.h" @@ -419,7 +420,8 @@ struct RowsetId { LOG(WARNING) << "failed to init rowset id: " << rowset_id_str; high = next_rowset_id().hi; } else { - LOG(FATAL) << "failed to init rowset id: " << rowset_id_str; + throw Exception( + Status::FatalError("failed to init rowset id: {}", rowset_id_str)); } } init(1, high, 0, 0); @@ -440,7 +442,7 @@ struct RowsetId { void init(int64_t id_version, int64_t high, int64_t middle, int64_t low) { version = id_version; if (UNLIKELY(high >= MAX_ROWSET_ID)) { - LOG(FATAL) << "inc rowsetid is too large:" << high; + throw Exception(Status::FatalError("inc rowsetid is too large:{}", high)); } hi = (id_version << 56) + (high & LOW_56_BITS); mi = middle; diff --git a/be/src/olap/page_cache.h b/be/src/olap/page_cache.h index 32b6683e7823b0..db1a6808345525 100644 --- a/be/src/olap/page_cache.h +++ b/be/src/olap/page_cache.h @@ -176,11 +176,9 @@ class StoragePageCache { return _pk_index_page_cache.get(); } default: - LOG(FATAL) << "get error type page cache"; - __builtin_unreachable(); + throw Exception(Status::FatalError("get error type page cache")); } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } }; diff --git a/be/src/olap/rowset/beta_rowset_writer_v2.h b/be/src/olap/rowset/beta_rowset_writer_v2.h index 78ec4a7dce703c..9040003a68d0d8 100644 --- a/be/src/olap/rowset/beta_rowset_writer_v2.h +++ b/be/src/olap/rowset/beta_rowset_writer_v2.h @@ -99,8 +99,7 @@ class BetaRowsetWriterV2 : public RowsetWriter { }; RowsetSharedPtr manual_build(const RowsetMetaSharedPtr& rowset_meta) override { - LOG(FATAL) << "not implemeted"; - return nullptr; + throw Exception(Status::FatalError("not 
implemeted")); } PUniqueId load_id() override { return _context.load_id; } diff --git a/be/src/olap/rowset/rowset_writer.h b/be/src/olap/rowset/rowset_writer.h index f84ff964ea3051..0a0d36ea04a661 100644 --- a/be/src/olap/rowset/rowset_writer.h +++ b/be/src/olap/rowset/rowset_writer.h @@ -170,7 +170,9 @@ class RowsetWriter { virtual int32_t allocate_segment_id() = 0; - virtual void set_segment_start_id(int num_segment) { LOG(FATAL) << "not supported!"; } + virtual void set_segment_start_id(int num_segment) { + throw Exception(Status::FatalError("not supported!")); + } virtual int64_t delete_bitmap_ns() { return 0; } diff --git a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.cpp b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.cpp index db6bac6b8b4c09..fe7167e9444a76 100644 --- a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.cpp +++ b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.cpp @@ -80,8 +80,7 @@ Status HierarchicalDataReader::init(const ColumnIteratorOptions& opts) { } Status HierarchicalDataReader::seek_to_first() { - LOG(FATAL) << "Not implemented"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Not implemented")); } Status HierarchicalDataReader::seek_to_ordinal(ordinal_t ord) { @@ -159,8 +158,7 @@ Status ExtractReader::init(const ColumnIteratorOptions& opts) { } Status ExtractReader::seek_to_first() { - LOG(FATAL) << "Not implemented"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Not implemented")); } Status ExtractReader::seek_to_ordinal(ordinal_t ord) { diff --git a/be/src/olap/storage_policy.cpp b/be/src/olap/storage_policy.cpp index 837e9bed178e3a..3b4a1f1a185678 100644 --- a/be/src/olap/storage_policy.cpp +++ b/be/src/olap/storage_policy.cpp @@ -141,8 +141,10 @@ std::vector> get_storage_resource_ids() { namespace { [[noreturn]] void exit_at_unknown_path_version(std::string_view resource_id, int64_t path_version) { - LOG(FATAL) << "unknown path version, please upgrade BE or drop 
this storage vault. resource_id=" - << resource_id << " path_version=" << path_version; + throw Exception( + Status::FatalError("unknown path version, please upgrade BE or drop this storage " + "vault. resource_id={} path_version={}", + resource_id, path_version)); } } // namespace diff --git a/be/src/olap/tablet_reader.cpp b/be/src/olap/tablet_reader.cpp index a83e0bfdbf4c30..17cab2a3c0c834 100644 --- a/be/src/olap/tablet_reader.cpp +++ b/be/src/olap/tablet_reader.cpp @@ -61,7 +61,7 @@ using namespace ErrorCode; void TabletReader::ReaderParams::check_validation() const { if (UNLIKELY(version.first == -1 && is_segcompaction == false)) { - LOG(FATAL) << "version is not set. tablet=" << tablet->tablet_id(); + throw Exception(Status::FatalError("version is not set. tablet={}", tablet->tablet_id())); } } diff --git a/be/src/pipeline/dependency.h b/be/src/pipeline/dependency.h index f1cfe2b02977e1..ecbd49a5647c2e 100644 --- a/be/src/pipeline/dependency.h +++ b/be/src/pipeline/dependency.h @@ -723,8 +723,7 @@ inline std::string get_exchange_type_name(ExchangeType idx) { case ExchangeType::LOCAL_MERGE_SORT: return "LOCAL_MERGE_SORT"; } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } struct DataDistribution { diff --git a/be/src/pipeline/exec/exchange_sink_buffer.cpp b/be/src/pipeline/exec/exchange_sink_buffer.cpp index e3f895444d4168..800ef6150738d6 100644 --- a/be/src/pipeline/exec/exchange_sink_buffer.cpp +++ b/be/src/pipeline/exec/exchange_sink_buffer.cpp @@ -422,8 +422,7 @@ void ExchangeSinkBuffer::_ended(InstanceLoId id) { } LOG(INFO) << ss.str(); - LOG(FATAL) << "not find the instance id"; - __builtin_unreachable(); + throw Exception(Status::FatalError("not find the instance id")); } else { std::unique_lock lock(*_instance_to_package_queue_mutex[id]); _running_sink_count[id]--; diff --git a/be/src/pipeline/exec/exchange_sink_buffer.h b/be/src/pipeline/exec/exchange_sink_buffer.h 
index 458c7c3f66e3ee..a381c5aff144f3 100644 --- a/be/src/pipeline/exec/exchange_sink_buffer.h +++ b/be/src/pipeline/exec/exchange_sink_buffer.h @@ -155,10 +155,9 @@ class ExchangeSendCallback : public ::doris::DummyBrpcCallback { start_rpc_time); } } catch (const std::exception& exp) { - LOG(FATAL) << "brpc callback error: " << exp.what(); + throw Exception(Status::FatalError("brpc callback error: {}", exp.what())); } catch (...) { - LOG(FATAL) << "brpc callback error."; - __builtin_unreachable(); + throw Exception(Status::FatalError("brpc callback error.")); } } int64_t start_rpc_time; diff --git a/be/src/pipeline/exec/hashjoin_build_sink.cpp b/be/src/pipeline/exec/hashjoin_build_sink.cpp index 19e8493e596a7e..47560875b51252 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.cpp +++ b/be/src/pipeline/exec/hashjoin_build_sink.cpp @@ -303,9 +303,7 @@ Status HashJoinBuildSinkLocalState::process_build_block(RuntimeState* state, [&](std::monostate& arg, auto join_op, auto short_circuit_for_null_in_build_side, auto with_other_conjuncts) -> Status { - LOG(FATAL) << "FATAL: uninited hash table"; - __builtin_unreachable(); - return Status::OK(); + throw Exception(Status::FatalError("FATAL: uninited hash table")); }, [&](auto&& arg, auto&& join_op, auto short_circuit_for_null_in_build_side, auto with_other_conjuncts) -> Status { diff --git a/be/src/pipeline/exec/operator.cpp b/be/src/pipeline/exec/operator.cpp index f6664e147a3dab..bb254aae72b8a7 100644 --- a/be/src/pipeline/exec/operator.cpp +++ b/be/src/pipeline/exec/operator.cpp @@ -414,8 +414,7 @@ std::shared_ptr DataSinkOperatorX::create_shar return nullptr; } else if constexpr (std::is_same_v) { - LOG(FATAL) << "should not reach here!"; - return nullptr; + throw Exception(Status::FatalError("should not reach here!")); } else { auto ss = LocalStateType::SharedStateType::create_shared(); ss->id = operator_id(); diff --git a/be/src/pipeline/exec/operator.h b/be/src/pipeline/exec/operator.h index 
a2c8e110cedac3..df6e9c913b6b4c 100644 --- a/be/src/pipeline/exec/operator.h +++ b/be/src/pipeline/exec/operator.h @@ -632,12 +632,10 @@ class OperatorXBase : public OperatorBase { _limit(-1) {} virtual Status init(const TPlanNode& tnode, RuntimeState* state); Status init(const TDataSink& tsink) override { - LOG(FATAL) << "should not reach here!"; - return Status::OK(); + throw Exception(Status::FatalError("should not reach here!")); } virtual Status init(ExchangeType type) { - LOG(FATAL) << "should not reach here!"; - return Status::OK(); + throw Exception(Status::FatalError("should not reach here!")); } [[noreturn]] virtual const std::vector& runtime_filter_descs() { throw doris::Exception(ErrorCode::NOT_IMPLEMENTED_ERROR, _op_name); diff --git a/be/src/runtime/exec_env_init.cpp b/be/src/runtime/exec_env_init.cpp index a371cdb947ff56..2d7554e702969f 100644 --- a/be/src/runtime/exec_env_init.cpp +++ b/be/src/runtime/exec_env_init.cpp @@ -421,9 +421,9 @@ void ExecEnv::init_file_cache_factory(std::vector& cache_paths std::unordered_set cache_path_set; Status rest = doris::parse_conf_cache_paths(doris::config::file_cache_path, cache_paths); if (!rest) { - LOG(FATAL) << "parse config file cache path failed, path=" << doris::config::file_cache_path - << ", reason=" << rest.msg(); - exit(-1); + throw Exception( + Status::FatalError("parse config file cache path failed, path={}, reason={}", + doris::config::file_cache_path, rest.msg())); } doris::Status cache_status; @@ -437,8 +437,8 @@ void ExecEnv::init_file_cache_factory(std::vector& cache_paths cache_path.path, cache_path.init_settings()); if (!cache_status.ok()) { if (!doris::config::ignore_broken_disk) { - LOG(FATAL) << "failed to init file cache, err: " << cache_status; - exit(-1); + throw Exception( + Status::FatalError("failed to init file cache, err: {}", cache_status)); } LOG(WARNING) << "failed to init file cache, err: " << cache_status; } diff --git a/be/src/runtime/jsonb_value.h 
b/be/src/runtime/jsonb_value.h index 65f4927759c304..5f530db1ac8117 100644 --- a/be/src/runtime/jsonb_value.h +++ b/be/src/runtime/jsonb_value.h @@ -61,58 +61,47 @@ struct JsonBinaryValue { } bool operator==(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } // != bool ne(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } // <= bool le(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } // >= bool ge(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } // < bool lt(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } // > bool gt(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } bool operator!=(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } bool operator<=(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between 
JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } bool operator>=(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } bool operator<(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } bool operator>(const JsonBinaryValue& other) const { - LOG(FATAL) << "comparing between JsonBinaryValue is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonBinaryValue is not supported")); } Status from_json_string(const char* s, size_t len); diff --git a/be/src/runtime/memory/cache_manager.h b/be/src/runtime/memory/cache_manager.h index a2a089b929dbdf..1e89e957ba1ce6 100644 --- a/be/src/runtime/memory/cache_manager.h +++ b/be/src/runtime/memory/cache_manager.h @@ -40,7 +40,8 @@ class CacheManager { #ifdef BE_TEST _caches.erase(it); #else - LOG(FATAL) << "Repeat register cache " << CachePolicy::type_string(cache->type()); + throw Exception(Status::FatalError("Repeat register cache {}", + CachePolicy::type_string(cache->type()))); #endif // BE_TEST } _caches.insert({cache->type(), cache}); diff --git a/be/src/runtime/memory/cache_policy.h b/be/src/runtime/memory/cache_policy.h index 8f077a4eb45bb1..72e61fed2e0013 100644 --- a/be/src/runtime/memory/cache_policy.h +++ b/be/src/runtime/memory/cache_policy.h @@ -99,10 +99,10 @@ class CachePolicy { case CacheType::TABLET_COLUMN_OBJECT_POOL: return "TabletColumnObjectPool"; default: - LOG(FATAL) << "not match type of cache policy :" << static_cast(type); + throw Exception(Status::FatalError("not match type of cache policy :{}", + 
static_cast(type))); } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } inline static std::unordered_map StringToType = { diff --git a/be/src/runtime/memory/lru_cache_policy.h b/be/src/runtime/memory/lru_cache_policy.h index 3fdb43facd7715..d4c282dab8274e 100644 --- a/be/src/runtime/memory/lru_cache_policy.h +++ b/be/src/runtime/memory/lru_cache_policy.h @@ -90,7 +90,8 @@ class LRUCachePolicy : public CachePolicy { case LRUCacheType::NUMBER: return "number"; default: - LOG(FATAL) << "not match type of lru cache:" << static_cast(type); + throw Exception( + Status::FatalError("not match type of lru cache:{}", static_cast(type))); } } diff --git a/be/src/runtime/memory/thread_mem_tracker_mgr.h b/be/src/runtime/memory/thread_mem_tracker_mgr.h index db3b32a6298820..9dbf4399492d02 100644 --- a/be/src/runtime/memory/thread_mem_tracker_mgr.h +++ b/be/src/runtime/memory/thread_mem_tracker_mgr.h @@ -246,13 +246,13 @@ inline void ThreadMemTrackerMgr::consume(int64_t size, int skip_large_memory_che } if (doris::config::crash_in_alloc_large_memory_bytes > 0 && size > doris::config::crash_in_alloc_large_memory_bytes) { - LOG(FATAL) << fmt::format( + throw Exception(Status::FatalError( "alloc large memory: {}, {}, crash generate core dumpsto help analyze, " "stacktrace:\n{}", size, is_attach_query() ? 
"in query or load: " + print_id(_query_id) : "not in query or load", - get_stack_trace()); + get_stack_trace())); } } } diff --git a/be/src/runtime/snapshot_loader.cpp b/be/src/runtime/snapshot_loader.cpp index 784904c78a3fb1..b492a929fca3bf 100644 --- a/be/src/runtime/snapshot_loader.cpp +++ b/be/src/runtime/snapshot_loader.cpp @@ -74,7 +74,7 @@ Status upload_with_checksum(io::RemoteFileSystem& fs, std::string_view local_pat RETURN_IF_ERROR(fs.upload(local_path, full_remote_path)); break; default: - LOG(FATAL) << "unknown fs type: " << static_cast(fs.type()); + throw Exception(Status::FatalError("unknown fs type: {}", static_cast(fs.type()))); } return Status::OK(); } @@ -807,8 +807,7 @@ Status SnapshotLoader::move(const std::string& snapshot_path, TabletSharedPtr ta } } else { - LOG(FATAL) << "only support overwrite now"; - __builtin_unreachable(); + throw Exception(Status::FatalError("only support overwrite now")); } // snapshot loader not need to change tablet uid diff --git a/be/src/runtime/stream_load/stream_load_executor.cpp b/be/src/runtime/stream_load/stream_load_executor.cpp index ad4d22946f1b83..054de96a881425 100644 --- a/be/src/runtime/stream_load/stream_load_executor.cpp +++ b/be/src/runtime/stream_load/stream_load_executor.cpp @@ -390,8 +390,7 @@ bool StreamLoadExecutor::collect_load_stat(StreamLoadContext* ctx, TTxnCommitAtt } switch (ctx->load_type) { case TLoadType::MINI_LOAD: { - LOG(FATAL) << "mini load is not supported any more"; - break; + throw Exception(Status::FatalError("mini load is not supported any more")); } case TLoadType::ROUTINE_LOAD: { attach->loadType = TLoadType::ROUTINE_LOAD; diff --git a/be/src/runtime/thread_context.h b/be/src/runtime/thread_context.h index e0a44af69c1d66..9ba7949ec5afad 100644 --- a/be/src/runtime/thread_context.h +++ b/be/src/runtime/thread_context.h @@ -354,8 +354,7 @@ class ThreadLocalHandle { DCHECK(bthread_context != nullptr); bthread_context->thread_local_handle_count--; } else { - LOG(FATAL) << 
"__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } } }; @@ -379,8 +378,8 @@ static ThreadContext* thread_context(bool allow_return_null = false) { return nullptr; } // It means that use thread_context() but this thread not attached a query/load using SCOPED_ATTACH_TASK macro. - LOG(FATAL) << "__builtin_unreachable, " << doris::memory_orphan_check_msg; - __builtin_unreachable(); + throw Exception( + Status::FatalError("__builtin_unreachable, {}", doris::memory_orphan_check_msg)); } // belong to one query object member, not be shared by multiple queries. diff --git a/be/src/util/binary_cast.hpp b/be/src/util/binary_cast.hpp index 8a91ab3a579152..e7c62ad45ac091 100644 --- a/be/src/util/binary_cast.hpp +++ b/be/src/util/binary_cast.hpp @@ -137,8 +137,7 @@ To binary_cast(From from) { conv.decimal = from; return conv.i128; } else { - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } } diff --git a/be/src/util/bit_util.h b/be/src/util/bit_util.h index 504b0b27428190..5ec5a8bf8e1aa4 100644 --- a/be/src/util/bit_util.h +++ b/be/src/util/bit_util.h @@ -237,9 +237,7 @@ class BitUtil { } else if constexpr (std::is_same_v) { return value; } else { - __builtin_unreachable(); - LOG(FATAL) << "__builtin_unreachable"; - return value; + throw Exception(Status::FatalError("__builtin_unreachable")); } } diff --git a/be/src/util/bitmap_value.h b/be/src/util/bitmap_value.h index 2d15ac99611274..528dbe40788229 100644 --- a/be/src/util/bitmap_value.h +++ b/be/src/util/bitmap_value.h @@ -2519,8 +2519,7 @@ class BitmapValueIterator { } break; case BitmapValue::BitmapDataType::SET: { - LOG(FATAL) << "BitmapValue with set do not support move"; - break; + throw Exception(Status::FatalError("BitmapValue with set do not support move")); } default: break; diff --git a/be/src/util/block_compression.cpp b/be/src/util/block_compression.cpp index 
d1788b0948a6f2..7a0aacd4252dec 100644 --- a/be/src/util/block_compression.cpp +++ b/be/src/util/block_compression.cpp @@ -233,7 +233,8 @@ class HadoopLz4BlockCompression : public Lz4BlockCompression { HadoopLz4BlockCompression() { Status st = Decompressor::create_decompressor(CompressType::LZ4BLOCK, &_decompressor); if (!st.ok()) { - LOG(FATAL) << "HadoopLz4BlockCompression construction failed. status = " << st << "\n"; + throw Exception(Status::FatalError( + "HadoopLz4BlockCompression construction failed. status = {}", st)); } } diff --git a/be/src/util/easy_json.cc b/be/src/util/easy_json.cc index 46c3a1867f7b42..fcb8021e3836b2 100644 --- a/be/src/util/easy_json.cc +++ b/be/src/util/easy_json.cc @@ -27,6 +27,8 @@ #include #include #include + +#include "common/exception.h" // IWYU pragma: no_include using rapidjson::SizeType; @@ -200,8 +202,7 @@ EasyJson EasyJson::PushBack(EasyJson::ComplexTypeInitializer val) { } else if (val == kArray) { push_val.SetArray(); } else { - LOG(FATAL) << "Unknown initializer type"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Unknown initializer type")); } value_->PushBack(push_val, alloc_->allocator()); return EasyJson(&(*value_)[value_->Size() - 1], alloc_); diff --git a/be/src/util/jsonb_utils.h b/be/src/util/jsonb_utils.h index 7dba0dca3af1eb..8ec842ef227dd5 100644 --- a/be/src/util/jsonb_utils.h +++ b/be/src/util/jsonb_utils.h @@ -23,6 +23,7 @@ #include +#include "common/exception.h" #include "jsonb_document.h" #include "jsonb_stream.h" #include "jsonb_writer.h" @@ -42,7 +43,8 @@ class JsonbToJson { const std::string to_json_string(const char* data, size_t size) { JsonbDocument* pdoc = doris::JsonbDocument::createDocument(data, size); if (!pdoc) { - LOG(FATAL) << "invalid json binary value: " << std::string_view(data, size); + throw Exception(Status::FatalError("invalid json binary value: {}", + std::string_view(data, size))); } return to_json_string(pdoc->getValue()); } diff --git 
a/be/src/util/rle_encoding.h b/be/src/util/rle_encoding.h index 206349b472815d..5369ace9eed6ce 100644 --- a/be/src/util/rle_encoding.h +++ b/be/src/util/rle_encoding.h @@ -283,7 +283,7 @@ void RleDecoder::RewindOne() { switch (rewind_state_) { case CANT_REWIND: - LOG(FATAL) << "Can't rewind more than once after each read!"; + throw Exception(Status::FatalError("Can't rewind more than once after each read!")); break; case REWIND_RUN: ++repeat_count_; diff --git a/be/src/util/threadpool.cpp b/be/src/util/threadpool.cpp index f5ea38515def36..e9af13f556e143 100644 --- a/be/src/util/threadpool.cpp +++ b/be/src/util/threadpool.cpp @@ -27,6 +27,7 @@ #include #include +#include "common/exception.h" #include "common/logging.h" #include "gutil/map-util.h" #include "gutil/port.h" @@ -194,7 +195,7 @@ void ThreadPoolToken::transition(State new_state) { CHECK(false); // QUIESCED is a terminal state break; default: - LOG(FATAL) << "Unknown token state: " << _state; + throw Exception(Status::FatalError("Unknown token state: {}", _state)); } #endif @@ -616,10 +617,10 @@ Status ThreadPool::create_thread() { void ThreadPool::check_not_pool_thread_unlocked() { Thread* current = Thread::current_thread(); if (ContainsKey(_threads, current)) { - LOG(FATAL) << strings::Substitute( - "Thread belonging to thread pool '$0' with " - "name '$1' called pool function that would result in deadlock", - _name, current->name()); + throw Exception( + Status::FatalError("Thread belonging to thread pool {} with " + "name {} called pool function that would result in deadlock", + _name, current->name())); } } diff --git a/be/src/util/timezone_utils.cpp b/be/src/util/timezone_utils.cpp index 6bb71ac46471c9..a26ad3703b79b9 100644 --- a/be/src/util/timezone_utils.cpp +++ b/be/src/util/timezone_utils.cpp @@ -35,6 +35,7 @@ #include #include +#include "common/exception.h" #include "common/logging.h" #include "common/status.h" @@ -83,8 +84,7 @@ void TimezoneUtils::load_timezones_to_cache() { const auto 
root_path = fs::path {base_str}; if (!exists(root_path)) { - LOG(FATAL) << "Cannot find system tzfile. Doris exiting!"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Cannot find system tzfile. Doris exiting!")); } std::set ignore_paths = {"posix", "right"}; // duplications. ignore them. diff --git a/be/src/vec/aggregate_functions/aggregate_function_map.h b/be/src/vec/aggregate_functions/aggregate_function_map.h index 17bc54f7499adb..7273390e7c5342 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_map.h +++ b/be/src/vec/aggregate_functions/aggregate_function_map.h @@ -40,10 +40,7 @@ struct AggregateFunctionMapAggData { using KeyType = std::conditional_t, StringRef, K>; using Map = phmap::flat_hash_map; - AggregateFunctionMapAggData() { - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); - } + AggregateFunctionMapAggData() { throw Exception(Status::FatalError("__builtin_unreachable")); } AggregateFunctionMapAggData(const DataTypes& argument_types) { _key_type = remove_nullable(argument_types[0]); diff --git a/be/src/vec/aggregate_functions/aggregate_function_reader_first_last.h b/be/src/vec/aggregate_functions/aggregate_function_reader_first_last.h index 8efea2dc6fc8e4..6f5d680d3eb0fc 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_reader_first_last.h +++ b/be/src/vec/aggregate_functions/aggregate_function_reader_first_last.h @@ -238,24 +238,17 @@ class ReaderFunctionData final void add_range_single_place(int64_t partition_start, int64_t partition_end, int64_t frame_start, int64_t frame_end, AggregateDataPtr place, const IColumn** columns, Arena*) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "ReaderFunctionData do not support add_range_single_place"); - __builtin_unreachable(); + throw doris::Exception( + Status::FatalError("ReaderFunctionData do not support add_range_single_place")); } void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena*) const override { - throw 
doris::Exception(ErrorCode::INTERNAL_ERROR, - "ReaderFunctionData do not support merge"); - __builtin_unreachable(); + throw doris::Exception(Status::FatalError("ReaderFunctionData do not support merge")); } void serialize(ConstAggregateDataPtr place, BufferWritable& buf) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "ReaderFunctionData do not support serialize"); - __builtin_unreachable(); + throw doris::Exception(Status::FatalError("ReaderFunctionData do not support serialize")); } void deserialize(AggregateDataPtr place, BufferReadable& buf, Arena*) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "ReaderFunctionData do not support deserialize"); - __builtin_unreachable(); + throw doris::Exception(Status::FatalError("ReaderFunctionData do not support deserialize")); } private: diff --git a/be/src/vec/aggregate_functions/aggregate_function_window.h b/be/src/vec/aggregate_functions/aggregate_function_window.h index 13fa8e74751df6..0cef4c82d3dbfe 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_window.h +++ b/be/src/vec/aggregate_functions/aggregate_function_window.h @@ -563,24 +563,19 @@ class WindowFunctionData final void add(AggregateDataPtr place, const IColumn** columns, ssize_t row_num, Arena*) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "WindowFunctionLeadLagData do not support add"); - __builtin_unreachable(); + throw doris::Exception(Status::FatalError("WindowFunctionLeadLagData do not support add")); } void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena*) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "WindowFunctionLeadLagData do not support merge"); - __builtin_unreachable(); + throw doris::Exception( + Status::FatalError("WindowFunctionLeadLagData do not support merge")); } void serialize(ConstAggregateDataPtr place, BufferWritable& buf) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - 
"WindowFunctionLeadLagData do not support serialize"); - __builtin_unreachable(); + throw doris::Exception( + Status::FatalError("WindowFunctionLeadLagData do not support serialize")); } void deserialize(AggregateDataPtr place, BufferReadable& buf, Arena*) const override { - throw doris::Exception(ErrorCode::INTERNAL_ERROR, - "WindowFunctionLeadLagData do not support deserialize"); - __builtin_unreachable(); + throw doris::Exception( + Status::FatalError("WindowFunctionLeadLagData do not support deserialize")); } private: diff --git a/be/src/vec/columns/column_string.cpp b/be/src/vec/columns/column_string.cpp index cb83a29bbada2c..db0088e67c27b6 100644 --- a/be/src/vec/columns/column_string.cpp +++ b/be/src/vec/columns/column_string.cpp @@ -40,16 +40,16 @@ template void ColumnStr::sanity_check() const { auto count = offsets.size(); if (chars.size() != offsets[count - 1]) { - LOG(FATAL) << "row count: " << count << ", chars.size(): " << chars.size() << ", offset[" - << count - 1 << "]: " << offsets[count - 1]; + throw Exception(Status::FatalError("row count: {}, chars.size(): {}, offset[{}]: ", count, + chars.size(), count - 1, offsets[count - 1])); } if (offsets[-1] != 0) { - LOG(FATAL) << "wrong offsets[-1]: " << offsets[-1]; + throw Exception(Status::FatalError("wrong offsets[-1]: {}", offsets[-1])); } for (size_t i = 0; i < count; ++i) { if (offsets[i] < offsets[i - 1]) { - LOG(FATAL) << "row count: " << count << ", offsets[" << i << "]: " << offsets[i] - << ", offsets[" << i - 1 << "]: " << offsets[i - 1]; + throw Exception(Status::FatalError("row count: {}, offsets[{}]: {}, offsets[{}]: {}", + count, i, offsets[i], i - 1, offsets[i - 1])); } } } diff --git a/be/src/vec/common/assert_cast.h b/be/src/vec/common/assert_cast.h index 02dce99e967bdb..1905983a58cc29 100644 --- a/be/src/vec/common/assert_cast.h +++ b/be/src/vec/common/assert_cast.h @@ -23,6 +23,7 @@ #include #include +#include "common/exception.h" #include "common/logging.h" #include 
"vec/common/demangle.h" @@ -45,35 +46,33 @@ PURE To assert_cast(From&& from) { if (auto ptr = dynamic_cast(from); ptr != nullptr) { return ptr; } - LOG(FATAL) << fmt::format("Bad cast from type:{}* to {}", - demangle(typeid(*from).name()), - demangle(typeid(To).name())); + throw doris::Exception(doris::Status::FatalError("Bad cast from type:{}* to {}", + demangle(typeid(*from).name()), + demangle(typeid(To).name()))); } } else { if (typeid(from) == typeid(To)) { return static_cast(from); } } - LOG(FATAL) << fmt::format("Bad cast from type:{} to {}", demangle(typeid(from).name()), - demangle(typeid(To).name())); - __builtin_unreachable(); + throw doris::Exception(doris::Status::FatalError("Bad cast from type:{} to {}", + demangle(typeid(from).name()), + demangle(typeid(To).name()))); }; #ifndef NDEBUG try { return perform_cast(std::forward(from)); } catch (const std::exception& e) { - LOG(FATAL) << "assert cast err:" << e.what(); + throw doris::Exception(doris::Status::FatalError("assert cast err:{}", e.what())); } - __builtin_unreachable(); #else if constexpr (check == TypeCheckOnRelease::ENABLE) { try { return perform_cast(std::forward(from)); } catch (const std::exception& e) { - LOG(FATAL) << "assert cast err:" << e.what(); + throw doris::Exception(doris::Status::FatalError("assert cast err:{}", e.what())); } - __builtin_unreachable(); } else { return static_cast(from); } diff --git a/be/src/vec/common/hash_table/string_hash_table.h b/be/src/vec/common/hash_table/string_hash_table.h index 74be1e85e1efe8..892598a83263b9 100644 --- a/be/src/vec/common/hash_table/string_hash_table.h +++ b/be/src/vec/common/hash_table/string_hash_table.h @@ -327,8 +327,7 @@ class StringHashTable : private boost::noncopyable { return iterator5 == rhs.iterator5; } } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw doris::Exception(doris::Status::FatalError("__builtin_unreachable")); } bool operator!=(const iterator_base& rhs) const { return !(*this == rhs); 
} diff --git a/be/src/vec/common/schema_util.cpp b/be/src/vec/common/schema_util.cpp index fd50af3e1fcd88..2b1c71c643d613 100644 --- a/be/src/vec/common/schema_util.cpp +++ b/be/src/vec/common/schema_util.cpp @@ -133,7 +133,7 @@ size_t get_size_of_interger(TypeIndex type) { case TypeIndex::UInt128: return sizeof(uint128_t); default: - LOG(FATAL) << "Unknown integer type: " << getTypeName(type); + throw Exception(Status::FatalError("Unknown integer type: {}", getTypeName(type))); return 0; } } @@ -231,8 +231,7 @@ void get_column_by_type(const vectorized::DataTypePtr& data_type, const std::str return; } // TODO handle more types like struct/date/datetime/decimal... - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } TabletColumn get_column_by_type(const vectorized::DataTypePtr& data_type, const std::string& name, diff --git a/be/src/vec/core/block.cpp b/be/src/vec/core/block.cpp index 4dc553b1a5790f..951c2661faf172 100644 --- a/be/src/vec/core/block.cpp +++ b/be/src/vec/core/block.cpp @@ -644,10 +644,10 @@ Block Block::clone_with_columns(const Columns& columns) const { size_t num_columns = data.size(); if (num_columns != columns.size()) { - LOG(FATAL) << fmt::format( + throw Exception(Status::FatalError( "Cannot clone block with columns because block has {} columns, but {} columns " "given.", - num_columns, columns.size()); + num_columns, columns.size())); } for (size_t i = 0; i < num_columns; ++i) { diff --git a/be/src/vec/core/decimal_comparison.h b/be/src/vec/core/decimal_comparison.h index 9e9d9ad399ae04..4503a264c28014 100644 --- a/be/src/vec/core/decimal_comparison.h +++ b/be/src/vec/core/decimal_comparison.h @@ -82,8 +82,9 @@ class DecimalComparison { DecimalComparison(Block& block, uint32_t result, const ColumnWithTypeAndName& col_left, const ColumnWithTypeAndName& col_right) { if (!apply(block, result, col_left, col_right)) { - LOG(FATAL) << fmt::format("Wrong decimal 
comparison with {} and {}", - col_left.type->get_name(), col_right.type->get_name()); + throw Exception(Status::FatalError("Wrong decimal comparison with {} and {}", + col_left.type->get_name(), + col_right.type->get_name())); } } @@ -106,8 +107,7 @@ class DecimalComparison { static bool compare(A a, B b, UInt32 scale_a, UInt32 scale_b) { static const UInt32 max_scale = max_decimal_precision(); if (scale_a > max_scale || scale_b > max_scale) { - LOG(FATAL) << "Bad scale of decimal field"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Bad scale of decimal field")); } Shift shift; @@ -213,8 +213,7 @@ class DecimalComparison { if (const ColVecB* c1_vec = check_and_get_column(c1.get())) constant_vector(a, c1_vec->get_data(), vec_res, scale); else { - LOG(FATAL) << "Wrong column in Decimal comparison"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Wrong column in Decimal comparison")); } } else if (c1_is_const) { const ColumnConst* c1_const = check_and_get_column_const(c1.get()); @@ -222,8 +221,7 @@ class DecimalComparison { if (const ColVecA* c0_vec = check_and_get_column(c0.get())) vector_constant(c0_vec->get_data(), b, vec_res, scale); else { - LOG(FATAL) << "Wrong column in Decimal comparison"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Wrong column in Decimal comparison")); } } else { if (const ColVecA* c0_vec = check_and_get_column(c0.get())) { @@ -231,12 +229,10 @@ class DecimalComparison { vector_vector(c0_vec->get_data(), c1_vec->get_data(), vec_res, scale); else { - LOG(FATAL) << "Wrong column in Decimal comparison"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Wrong column in Decimal comparison")); } } else { - LOG(FATAL) << "Wrong column in Decimal comparison"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Wrong column in Decimal comparison")); } } return c_res; @@ -262,8 +258,7 @@ class DecimalComparison { if constexpr (scale_right) overflow |= 
common::mul_overflow(y, scale, y); if (overflow) { - LOG(FATAL) << "Can't compare"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Can't compare")); } } else { if constexpr (scale_left) x *= scale; diff --git a/be/src/vec/core/field.h b/be/src/vec/core/field.h index 341f65e075ed11..1176840738a289 100644 --- a/be/src/vec/core/field.h +++ b/be/src/vec/core/field.h @@ -38,6 +38,7 @@ #include #include "common/compiler_util.h" // IWYU pragma: keep +#include "common/exception.h" #include "olap/hll.h" #include "util/bitmap_value.h" #include "util/quantile_state.h" @@ -168,7 +169,7 @@ class JsonbField { JsonbField(const char* ptr, size_t len) : size(len) { data = new char[size]; if (!data) { - LOG(FATAL) << "new data buffer failed, size: " << size; + throw Exception(Status::FatalError("new data buffer failed, size: {}", size)); } memcpy(data, ptr, size); } @@ -176,7 +177,7 @@ class JsonbField { JsonbField(const JsonbField& x) : size(x.size) { data = new char[size]; if (!data) { - LOG(FATAL) << "new data buffer failed, size: " << size; + throw Exception(Status::FatalError("new data buffer failed, size: {}", size)); } memcpy(data, x.data, size); } @@ -189,7 +190,7 @@ class JsonbField { JsonbField& operator=(const JsonbField& x) { data = new char[size]; if (!data) { - LOG(FATAL) << "new data buffer failed, size: " << size; + throw Exception(Status::FatalError("new data buffer failed, size: {}", size)); } memcpy(data, x.data, size); return *this; @@ -216,38 +217,30 @@ class JsonbField { size_t get_size() const { return size; } bool operator<(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not supported")); } bool operator<=(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not 
supported")); } bool operator==(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not supported")); } bool operator>(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not supported")); } bool operator>=(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not supported")); } bool operator!=(const JsonbField& r) const { - LOG(FATAL) << "comparing between JsonbField is not supported"; - __builtin_unreachable(); + throw Exception(Status::FatalError("comparing between JsonbField is not supported")); } const JsonbField& operator+=(const JsonbField& r) { - LOG(FATAL) << "Not support plus opration on JsonbField"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Not support plus opration on JsonbField")); } const JsonbField& operator-=(const JsonbField& r) { - LOG(FATAL) << "Not support minus opration on JsonbField"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Not support minus opration on JsonbField")); } private: @@ -305,8 +298,7 @@ class DecimalField { const DecimalField& operator+=(const DecimalField& r) { if (scale != r.get_scale()) { - LOG(FATAL) << "Add different decimal fields"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Add different decimal fields")); } dec += r.get_value(); return *this; @@ -314,8 +306,7 @@ class DecimalField { const DecimalField& operator-=(const DecimalField& r) { if (scale != r.get_scale()) { - LOG(FATAL) << "Sub different decimal fields"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Sub different decimal fields")); } dec -= r.get_value(); return *this; 
@@ -422,8 +413,8 @@ class Field { case IPv6: return "IPv6"; default: - LOG(FATAL) << "type not supported, type=" << Types::to_string(which); - break; + throw Exception( + Status::FatalError("type not supported, type={}", Types::to_string(which))); } __builtin_unreachable(); } @@ -558,8 +549,9 @@ class Field { return which <=> rhs.which; } if (which != rhs.which) { - LOG(FATAL) << "lhs type not equal with rhs, lhs=" << Types::to_string(which) - << ", rhs=" << Types::to_string(rhs.which); + throw Exception(Status::FatalError("lhs type not equal with rhs, lhs={}, rhs={}", + Types::to_string(which), + Types::to_string(rhs.which))); } switch (which) { @@ -601,9 +593,9 @@ class Field { case Types::Decimal256: return get() <=> rhs.get(); default: - LOG(FATAL) << "lhs type not equal with rhs, lhs=" << Types::to_string(which) - << ", rhs=" << Types::to_string(rhs.which); - break; + throw Exception(Status::FatalError("lhs type not equal with rhs, lhs={}, rhs={}", + Types::to_string(which), + Types::to_string(rhs.which))); } } @@ -675,8 +667,8 @@ class Field { f(field.template get()); return; default: - LOG(FATAL) << "type not supported, type=" << Types::to_string(field.which); - break; + throw Exception(Status::FatalError("type not supported, type={}", + Types::to_string(field.which))); } } diff --git a/be/src/vec/core/types.h b/be/src/vec/core/types.h index c817c6ab273f42..223dc13c8182bd 100644 --- a/be/src/vec/core/types.h +++ b/be/src/vec/core/types.h @@ -942,8 +942,7 @@ inline const char* getTypeName(TypeIndex idx) { return "Time"; } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } // NOLINTEND(readability-function-size) } // namespace vectorized diff --git a/be/src/vec/data_types/data_type_number_base.cpp b/be/src/vec/data_types/data_type_number_base.cpp index 1afed3d7d1a394..55330bd2797772 100644 --- a/be/src/vec/data_types/data_type_number_base.cpp +++ 
b/be/src/vec/data_types/data_type_number_base.cpp @@ -158,8 +158,7 @@ Field DataTypeNumberBase::get_field(const TExprNode& node) const { if constexpr (std::is_same_v, TypeId>) { return Float64(node.float_literal.value); } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } template diff --git a/be/src/vec/data_types/data_type_number_base.h b/be/src/vec/data_types/data_type_number_base.h index a73bd9951891a3..c560fdd01adac3 100644 --- a/be/src/vec/data_types/data_type_number_base.h +++ b/be/src/vec/data_types/data_type_number_base.h @@ -125,8 +125,7 @@ class DataTypeNumberBase : public IDataType { if constexpr (std::is_same_v, TypeId>) { return doris::FieldType::OLAP_FIELD_TYPE_DOUBLE; } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } Field get_default() const override; diff --git a/be/src/vec/data_types/serde/data_type_serde.h b/be/src/vec/data_types/serde/data_type_serde.h index 1a089bb73fe99c..122a700cf9b20b 100644 --- a/be/src/vec/data_types/serde/data_type_serde.h +++ b/be/src/vec/data_types/serde/data_type_serde.h @@ -395,8 +395,9 @@ inline static NullMap revert_null_map(const NullMap* null_bytemap, size_t start, inline void checkArrowStatus(const arrow::Status& status, const std::string& column, const std::string& format_name) { if (!status.ok()) { - LOG(FATAL) << "arrow serde with arrow: " << format_name << " with column : " << column - << " with error msg: " << status.ToString(); + throw Exception( + Status::FatalError("arrow serde with arrow: {} with column : {} with error msg: {}", + format_name, column, status.ToString())); } } diff --git a/be/src/vec/exec/format/parquet/bool_rle_decoder.cpp b/be/src/vec/exec/format/parquet/bool_rle_decoder.cpp index 17ce68e604e9b8..3f46a9c0073568 100644 --- a/be/src/vec/exec/format/parquet/bool_rle_decoder.cpp +++ 
b/be/src/vec/exec/format/parquet/bool_rle_decoder.cpp @@ -36,15 +36,16 @@ void BoolRLEDecoder::set_data(Slice* slice) { _offset = 0; _current_value_idx = 0; if (_num_bytes < 4) { - LOG(FATAL) << "Received invalid length : " + std::to_string(_num_bytes) + - " (corrupt data page?)"; + throw Exception(Status::FatalError("Received invalid length : {} (corrupt data page?)", + std::to_string(_num_bytes))); } // Load the first 4 bytes in little-endian, which indicates the length const uint8_t* data = reinterpret_cast(_data->data); uint32_t num_bytes = decode_fixed32_le(data); if (num_bytes > static_cast(_num_bytes - 4)) { - LOG(FATAL) << ("Received invalid number of bytes : " + std::to_string(num_bytes) + - " (corrupt data page?)"); + throw Exception( + Status::FatalError("Received invalid number of bytes : {} (corrupt data page?)", + std::to_string(_num_bytes))); } _num_bytes = num_bytes; auto decoder_data = data + 4; diff --git a/be/src/vec/exec/format/parquet/decoder.h b/be/src/vec/exec/format/parquet/decoder.h index 1654878af80a29..06e131b5b56049 100644 --- a/be/src/vec/exec/format/parquet/decoder.h +++ b/be/src/vec/exec/format/parquet/decoder.h @@ -79,8 +79,8 @@ class Decoder { } virtual MutableColumnPtr convert_dict_column_to_string_column(const ColumnInt32* dict_column) { - LOG(FATAL) << "Method convert_dict_column_to_string_column is not supported"; - __builtin_unreachable(); + throw doris::Exception(ErrorCode::NOT_IMPLEMENTED_ERROR, + "Method convert_dict_column_to_string_column is not supported"); } protected: diff --git a/be/src/vec/exec/format/parquet/delta_bit_pack_decoder.h b/be/src/vec/exec/format/parquet/delta_bit_pack_decoder.h index 9497aa1cb1cdb5..dbe90acc985a4d 100644 --- a/be/src/vec/exec/format/parquet/delta_bit_pack_decoder.h +++ b/be/src/vec/exec/format/parquet/delta_bit_pack_decoder.h @@ -177,7 +177,8 @@ class DeltaBitPackDecoder final : public DeltaDecoder { _bit_reader.reset(new BitReader((const uint8_t*)slice->data, slice->size)); Status st = 
_init_header(); if (!st.ok()) { - LOG(FATAL) << "Fail to init delta encoding header for " << st.to_string(); + throw Exception(Status::FatalError("Fail to init delta encoding header for {}", + st.to_string())); } _data = slice; _offset = 0; @@ -189,7 +190,8 @@ class DeltaBitPackDecoder final : public DeltaDecoder { _bit_reader = std::move(bit_reader); Status st = _init_header(); if (!st.ok()) { - LOG(FATAL) << "Fail to init delta encoding header for " << st.to_string(); + throw Exception(Status::FatalError("Fail to init delta encoding header for {}", + st.to_string())); } } @@ -345,7 +347,7 @@ class DeltaByteArrayDecoder : public DeltaDecoder { int ret; Status st = _prefix_len_decoder.decode(_buffered_prefix_length.data(), num_prefix, &ret); if (!st.ok()) { - LOG(FATAL) << "Fail to decode delta prefix, status: " << st; + throw Exception(Status::FatalError("Fail to decode delta prefix, status: {}", st)); } DCHECK_EQ(ret, num_prefix); _prefix_len_offset = 0; @@ -527,7 +529,7 @@ void DeltaLengthByteArrayDecoder::_decode_lengths() { int ret; Status st = _len_decoder.decode(_buffered_length.data(), num_length, &ret); if (!st.ok()) { - LOG(FATAL) << "Fail to decode delta length, status: " << st; + throw Exception(Status::FatalError("Fail to decode delta length, status: {}", st)); } DCHECK_EQ(ret, num_length); _length_idx = 0; diff --git a/be/src/vec/exec/format/parquet/parquet_column_convert.h b/be/src/vec/exec/format/parquet/parquet_column_convert.h index cf6f8aa13fa1d1..d35a69ff59c625 100644 --- a/be/src/vec/exec/format/parquet/parquet_column_convert.h +++ b/be/src/vec/exec/format/parquet/parquet_column_convert.h @@ -423,8 +423,7 @@ class FixedSizeToDecimal : public PhysicalToLogicalConverter { switch (_type_length) { APPLY_FOR_DECIMALS() default: - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } return Status::OK(); #undef APPLY_FOR_DECIMALS @@ -456,8 +455,7 @@ class FixedSizeToDecimal 
: public PhysicalToLogicalConverter { } else if constexpr (ScaleType == DecimalScaleParams::NO_SCALE) { // do nothing } else { - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } auto& v = reinterpret_cast(data[start_idx + i]); v = (DecimalType)value; @@ -501,8 +499,7 @@ class StringToDecimal : public PhysicalToLogicalConverter { } else if constexpr (ScaleType == DecimalScaleParams::NO_SCALE) { // do nothing } else { - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } } auto& v = reinterpret_cast(data[start_idx + i]); diff --git a/be/src/vec/exec/format/parquet/vparquet_column_reader.h b/be/src/vec/exec/format/parquet/vparquet_column_reader.h index 4c6e5b1eac9f60..a8062d2d9f9b7c 100644 --- a/be/src/vec/exec/format/parquet/vparquet_column_reader.h +++ b/be/src/vec/exec/format/parquet/vparquet_column_reader.h @@ -129,8 +129,8 @@ class ParquetColumnReader { } virtual MutableColumnPtr convert_dict_column_to_string_column(const ColumnInt32* dict_column) { - LOG(FATAL) << "Method convert_dict_column_to_string_column is not supported"; - __builtin_unreachable(); + throw Exception( + Status::FatalError("Method convert_dict_column_to_string_column is not supported")); } static Status create(io::FileReaderSPtr file, FieldSchema* field, diff --git a/be/src/vec/exec/jni_connector.cpp b/be/src/vec/exec/jni_connector.cpp index a87ccf987ac7af..11a58e81c98d89 100644 --- a/be/src/vec/exec/jni_connector.cpp +++ b/be/src/vec/exec/jni_connector.cpp @@ -185,8 +185,8 @@ Status JniConnector::close() { jthrowable exc = (env)->ExceptionOccurred(); if (exc != nullptr) { // Ensure successful resource release - LOG(FATAL) << "Failed to release jni resource: " - << JniUtil::GetJniExceptionMsg(env).to_string(); + throw Exception(Status::FatalError("Failed to release jni resource: {}", + JniUtil::GetJniExceptionMsg(env).to_string())); 
} } return Status::OK(); diff --git a/be/src/vec/exec/scan/split_source_connector.h b/be/src/vec/exec/scan/split_source_connector.h index 8f38cd4f17a18f..abe59562578aaf 100644 --- a/be/src/vec/exec/scan/split_source_connector.h +++ b/be/src/vec/exec/scan/split_source_connector.h @@ -117,7 +117,8 @@ class LocalSplitSourceConnector : public SplitSourceConnector { // for compatibility. return &_scan_ranges[0].scan_range.ext_scan_range.file_scan_range.params; } - LOG(FATAL) << "Unreachable, params is got by file_scan_range_params_map"; + throw Exception( + Status::FatalError("Unreachable, params is got by file_scan_range_params_map")); } }; @@ -160,7 +161,8 @@ class RemoteSplitSourceConnector : public SplitSourceConnector { int num_scan_ranges() override { return _num_splits; } TFileScanRangeParams* get_params() override { - LOG(FATAL) << "Unreachable, params is got by file_scan_range_params_map"; + throw Exception( + Status::FatalError("Unreachable, params is got by file_scan_range_params_map")); } }; diff --git a/be/src/vec/exprs/vexpr.h b/be/src/vec/exprs/vexpr.h index 953fbaa9c38c8d..91786337244013 100644 --- a/be/src/vec/exprs/vexpr.h +++ b/be/src/vec/exprs/vexpr.h @@ -237,18 +237,18 @@ class VExpr { // If this expr is a BloomPredicate, this method will return a BloomFilterFunc virtual std::shared_ptr get_bloom_filter_func() const { - LOG(FATAL) << "Method 'get_bloom_filter_func()' is not supported in expression: " - << this->debug_string(); - return nullptr; + throw Exception(Status::FatalError( + "Method 'get_bloom_filter_func()' is not supported in expression: {}", + this->debug_string())); } virtual std::shared_ptr get_set_func() const { return nullptr; } // If this expr is a BitmapPredicate, this method will return a BitmapFilterFunc virtual std::shared_ptr get_bitmap_filter_func() const { - LOG(FATAL) << "Method 'get_bitmap_filter_func()' is not supported in expression: " - << this->debug_string(); - return nullptr; + throw Exception(Status::FatalError( + 
"Method 'get_bitmap_filter_func()' is not supported in expression: {}", + this->debug_string())); } // fast_execute can direct copy expr filter result which build by apply index in segment_iterator diff --git a/be/src/vec/functions/array/function_array_apply.cpp b/be/src/vec/functions/array/function_array_apply.cpp index 75425389dd975c..4161441080aac0 100644 --- a/be/src/vec/functions/array/function_array_apply.cpp +++ b/be/src/vec/functions/array/function_array_apply.cpp @@ -24,6 +24,7 @@ #include #include +#include "common/exception.h" #include "common/status.h" #include "runtime/thread_context.h" #include "vec/aggregate_functions/aggregate_function.h" @@ -130,8 +131,7 @@ class FunctionArrayApply : public IFunction { if constexpr (op == ApplyOp::GE) { return data >= comp; } - LOG(FATAL) << "__builtin_unreachable"; - __builtin_unreachable(); + throw Exception(Status::FatalError("__builtin_unreachable")); } // need exception safety diff --git a/be/src/vec/functions/function_cast.h b/be/src/vec/functions/function_cast.h index 48619ff85f83c8..af9e9d19267073 100644 --- a/be/src/vec/functions/function_cast.h +++ b/be/src/vec/functions/function_cast.h @@ -665,7 +665,14 @@ struct ConvertImplNumberToJsonb { } else if constexpr (std::is_same_v) { writer.writeDouble(data[i]); } else { - LOG(FATAL) << "unsupported type "; + static_assert(std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v, + "unsupported type"); __builtin_unreachable(); } column_string->insert_data(writer.getOutput()->getBuffer(), @@ -950,8 +957,7 @@ struct ConvertImplFromJsonb { res[i] = 0; } } else { - LOG(FATAL) << "unsupported type "; - __builtin_unreachable(); + throw Exception(Status::FatalError("unsupported type")); } } diff --git a/be/src/vec/json/simd_json_parser.h b/be/src/vec/json/simd_json_parser.h index 5189e93563cc52..79924a12a3a4ff 100644 --- a/be/src/vec/json/simd_json_parser.h +++ 
b/be/src/vec/json/simd_json_parser.h @@ -208,8 +208,8 @@ class SimdJSONParser { /// Optional: Allocates memory to parse JSON documents faster. void reserve(size_t max_size) { if (parser.allocate(max_size) != simdjson::error_code::SUCCESS) { - LOG(FATAL) << "Couldn't allocate " + std::to_string(max_size) + - " bytes when parsing JSON"; + throw Exception(Status::FatalError("Couldn't allocate {} bytes when parsing JSON", + std::to_string(max_size))); } } diff --git a/be/src/vec/olap/olap_data_convertor.h b/be/src/vec/olap/olap_data_convertor.h index 3473d9d26b5205..75aff7dfec34cd 100644 --- a/be/src/vec/olap/olap_data_convertor.h +++ b/be/src/vec/olap/olap_data_convertor.h @@ -455,7 +455,8 @@ class OlapBlockDataConvertor { const void* get_data() const override { return _results.data(); }; const void* get_data_at(size_t offset) const override { - LOG(FATAL) << "now not support get_data_at for OlapColumnDataConvertorArray"; + throw doris::Exception(ErrorCode::NOT_IMPLEMENTED_ERROR, + "now not support get_data_at for OlapColumnDataConvertorArray"); __builtin_unreachable(); }; Status convert_to_olap() override; @@ -484,7 +485,8 @@ class OlapBlockDataConvertor { Status convert_to_olap() override; const void* get_data() const override { return _results.data(); }; const void* get_data_at(size_t offset) const override { - LOG(FATAL) << "now not support get_data_at for OlapColumnDataConvertorMap"; + throw doris::Exception(ErrorCode::NOT_IMPLEMENTED_ERROR, + "now not support get_data_at for OlapColumnDataConvertorMap"); __builtin_unreachable(); }; diff --git a/be/src/vec/runtime/vdatetime_value.cpp b/be/src/vec/runtime/vdatetime_value.cpp index 86c50f0936f30d..026648319d4be4 100644 --- a/be/src/vec/runtime/vdatetime_value.cpp +++ b/be/src/vec/runtime/vdatetime_value.cpp @@ -3434,8 +3434,7 @@ void DateV2Value::unchecked_set_time(uint8_t hour, uint8_t minute, uint16_t s date_v2_value_.second_ = second; date_v2_value_.microsecond_ = microsecond; } else { - LOG(FATAL) << "Invalid 
operation 'set_time' for date!"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Invalid operation 'set_time' for date!")); } } @@ -3444,8 +3443,7 @@ void DateV2Value::set_microsecond(uint64_t microsecond) { if constexpr (is_datetime) { date_v2_value_.microsecond_ = microsecond; } else { - LOG(FATAL) << "Invalid operation 'set_microsecond' for date!"; - __builtin_unreachable(); + throw Exception(Status::FatalError("Invalid operation 'set_microsecond' for date!")); } } diff --git a/be/test/util/threadpool_test.cpp b/be/test/util/threadpool_test.cpp index 3859639539dbb7..d331bd0d2ac25d 100644 --- a/be/test/util/threadpool_test.cpp +++ b/be/test/util/threadpool_test.cpp @@ -42,6 +42,7 @@ #include "common/logging.h" #include "common/status.h" +#include "gtest/gtest.h" #include "gtest/gtest_pred_impl.h" #include "gutil/strings/substitute.h" #include "util/barrier.h" From 6c57c3c70dc669ba386782bfa85baa096129852c Mon Sep 17 00:00:00 2001 From: zclllyybb Date: Thu, 19 Dec 2024 22:00:18 +0800 Subject: [PATCH 15/82] [fix](ub) Dont throw in noexcept function (#45672) --- be/src/pipeline/exec/exchange_sink_buffer.h | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/be/src/pipeline/exec/exchange_sink_buffer.h b/be/src/pipeline/exec/exchange_sink_buffer.h index a381c5aff144f3..458c7c3f66e3ee 100644 --- a/be/src/pipeline/exec/exchange_sink_buffer.h +++ b/be/src/pipeline/exec/exchange_sink_buffer.h @@ -155,9 +155,10 @@ class ExchangeSendCallback : public ::doris::DummyBrpcCallback { start_rpc_time); } } catch (const std::exception& exp) { - throw Exception(Status::FatalError("brpc callback error: {}", exp.what())); + LOG(FATAL) << "brpc callback error: " << exp.what(); } catch (...) 
{ - throw Exception(Status::FatalError("brpc callback error.")); + LOG(FATAL) << "brpc callback error."; + __builtin_unreachable(); } } int64_t start_rpc_time; From b5249a9e47149bd5ff33c25933a9ae882c6e6b45 Mon Sep 17 00:00:00 2001 From: zhengyu Date: Fri, 20 Dec 2024 00:08:33 +0800 Subject: [PATCH 16/82] [opt](cloud) reduce cache hotspot table write amplification (#45557) 1. batch insert cloud_cache_hotspot in FE 2. enlarge polling interval in FE 3. shrink bucket num to 1 for cloud_cache_hotspot table 4. ignore stable statistics only catch the dynamic in BE Signed-off-by: zhengyu --- be/src/cloud/cloud_tablet_hotspot.cpp | 94 +++++++++++-------- be/src/cloud/cloud_tablet_hotspot.h | 19 ++++ .../java/org/apache/doris/common/Config.java | 4 +- .../doris/cloud/CacheHotspotManager.java | 2 +- .../doris/cloud/CacheHotspotManagerUtils.java | 3 +- 5 files changed, 80 insertions(+), 42 deletions(-) diff --git a/be/src/cloud/cloud_tablet_hotspot.cpp b/be/src/cloud/cloud_tablet_hotspot.cpp index dd197268646fbc..6391a2dc5c4928 100644 --- a/be/src/cloud/cloud_tablet_hotspot.cpp +++ b/be/src/cloud/cloud_tablet_hotspot.cpp @@ -57,18 +57,55 @@ TabletHotspot::~TabletHotspot() { } } -struct MapKeyHash { - int64_t operator()(const std::pair& key) const { - return std::hash {}(key.first) + std::hash {}(key.second); +void get_return_partitions( + const std::unordered_map, MapKeyHash>& + hot_partition, + const std::unordered_map, MapKeyHash>& + last_hot_partition, + std::vector* hot_tables, int& return_partitions, int N) { + for (const auto& [key, partition_to_value] : hot_partition) { + THotTableMessage msg; + msg.table_id = key.first; + msg.index_id = key.second; + for (const auto& [partition_id, value] : partition_to_value) { + if (return_partitions > N) { + return; + } + auto last_value_iter = last_hot_partition.find(key); + if (last_value_iter != last_hot_partition.end()) { + auto last_partition_iter = last_value_iter->second.find(partition_id); + if (last_partition_iter != 
last_value_iter->second.end()) { + const auto& last_value = last_partition_iter->second; + if (std::abs(static_cast(value.qpd) - + static_cast(last_value.qpd)) < 5 && + std::abs(static_cast(value.qpw) - + static_cast(last_value.qpw)) < 10 && + std::abs(static_cast(value.last_access_time) - + static_cast(last_value.last_access_time)) < 60) { + LOG(INFO) << "skip partition_id=" << partition_id << " qpd=" << value.qpd + << " qpw=" << value.qpw + << " last_access_time=" << value.last_access_time + << " last_qpd=" << last_value.qpd + << " last_qpw=" << last_value.qpw + << " last_access_time=" << last_value.last_access_time; + continue; + } + } + } + THotPartition hot_partition; + hot_partition.__set_partition_id(partition_id); + hot_partition.__set_query_per_day(value.qpd); + hot_partition.__set_query_per_week(value.qpw); + hot_partition.__set_last_access_time(value.last_access_time); + msg.hot_partitions.push_back(hot_partition); + return_partitions++; + } + msg.__isset.hot_partitions = !msg.hot_partitions.empty(); + hot_tables->push_back(std::move(msg)); } -}; -struct TabletHotspotMapValue { - uint64_t qpd = 0; // query per day - uint64_t qpw = 0; // query per week - int64_t last_access_time; -}; - -using TabletHotspotMapKey = std::pair; +} void TabletHotspot::get_top_n_hot_partition(std::vector* hot_tables) { // map, map> for day @@ -108,33 +145,14 @@ void TabletHotspot::get_top_n_hot_partition(std::vector* hot_t }); constexpr int N = 50; int return_partitions = 0; - auto get_return_partitions = - [=, &return_partitions]( - const std::unordered_map, - MapKeyHash>& hot_partition) { - for (const auto& [key, partition_to_value] : hot_partition) { - THotTableMessage msg; - msg.table_id = key.first; - msg.index_id = key.second; - for (const auto& [partition_id, value] : partition_to_value) { - if (return_partitions > N) { - return; - } - THotPartition hot_partition; - hot_partition.__set_partition_id(partition_id); - hot_partition.__set_query_per_day(value.qpd); - 
hot_partition.__set_query_per_week(value.qpw); - hot_partition.__set_last_access_time(value.last_access_time); - msg.hot_partitions.push_back(hot_partition); - return_partitions++; - } - msg.__isset.hot_partitions = !msg.hot_partitions.empty(); - hot_tables->push_back(std::move(msg)); - } - }; - get_return_partitions(day_hot_partitions); - get_return_partitions(week_hot_partitions); + + get_return_partitions(day_hot_partitions, _last_day_hot_partitions, hot_tables, + return_partitions, N); + get_return_partitions(week_hot_partitions, _last_week_hot_partitions, hot_tables, + return_partitions, N); + + _last_day_hot_partitions = std::move(day_hot_partitions); + _last_week_hot_partitions = std::move(week_hot_partitions); } void HotspotCounter::make_dot_point() { diff --git a/be/src/cloud/cloud_tablet_hotspot.h b/be/src/cloud/cloud_tablet_hotspot.h index af98f99a558b9b..0be1c085a6c990 100644 --- a/be/src/cloud/cloud_tablet_hotspot.h +++ b/be/src/cloud/cloud_tablet_hotspot.h @@ -49,6 +49,19 @@ struct HotspotCounter { }; using HotspotCounterPtr = std::shared_ptr; +using TabletHotspotMapKey = std::pair; + +struct TabletHotspotMapValue { + uint64_t qpd = 0; // query per day + uint64_t qpw = 0; // query per week + int64_t last_access_time; +}; + +struct MapKeyHash { + int64_t operator()(const std::pair& key) const { + return std::hash {}(key.first) + std::hash {}(key.second); + } +}; class TabletHotspot { public: @@ -71,6 +84,12 @@ class TabletHotspot { bool _closed {false}; std::mutex _mtx; std::condition_variable _cond; + std::unordered_map, + MapKeyHash> + _last_day_hot_partitions; + std::unordered_map, + MapKeyHash> + _last_week_hot_partitions; }; } // namespace doris diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java index c601a492162958..935300dee6f2fd 100644 --- a/fe/fe-common/src/main/java/org/apache/doris/common/Config.java +++ 
b/fe/fe-common/src/main/java/org/apache/doris/common/Config.java @@ -3190,11 +3190,11 @@ public static int metaServiceRpcRetryTimes() { public static boolean enable_fetch_cluster_cache_hotspot = true; @ConfField(mutable = true) - public static long fetch_cluster_cache_hotspot_interval_ms = 600000; + public static long fetch_cluster_cache_hotspot_interval_ms = 3600000; // to control the max num of values inserted into cache hotspot internal table // insert into cache table when the size of batch values reaches this limit @ConfField(mutable = true) - public static long batch_insert_cluster_cache_hotspot_num = 1000; + public static long batch_insert_cluster_cache_hotspot_num = 5000; /** * intervals between be status checks for CloudUpgradeMgr diff --git a/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManager.java b/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManager.java index 0b83baa94d6d4a..f4c7392eb75c63 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManager.java +++ b/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManager.java @@ -159,9 +159,9 @@ public void runAfterCatalogReady() { } }); } - triggerBatchInsert(); }); }); + triggerBatchInsert(); idToTable.clear(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManagerUtils.java b/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManagerUtils.java index 20de42f8cdc25a..72710debaefd9a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManagerUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/cloud/CacheHotspotManagerUtils.java @@ -70,9 +70,10 @@ public class CacheHotspotManagerUtils { + " last_access_time DATETIMEV2)\n" + " UNIQUE KEY(cluster_id, backend_id, table_id, index_id, partition_id, insert_day)\n" + " PARTITION BY RANGE (insert_day) ()\n" - + " DISTRIBUTED BY HASH (cluster_id)\n" + + " DISTRIBUTED BY HASH (cluster_id) BUCKETS 1\n" + " PROPERTIES (\n" + " 
\"dynamic_partition.enable\" = \"true\",\n" + + " \"dynamic_partition.buckets\" = \"1\",\n" + " \"dynamic_partition.time_unit\" = \"DAY\",\n" + " \"dynamic_partition.start\" = \"-7\",\n" + " \"dynamic_partition.end\" = \"3\",\n" From 1bd8003a1a126e8ea033b9452499da404b2ec0be Mon Sep 17 00:00:00 2001 From: Luwei Date: Fri, 20 Dec 2024 00:28:51 +0800 Subject: [PATCH 17/82] [Enhancement](compaction) enable the compaction producer to generate multiple compaction tasks in a single run (#45411) --- be/src/common/config.cpp | 2 + be/src/common/config.h | 2 + be/src/olap/tablet_manager.cpp | 52 ++++++++++++++++++--- be/test/olap/tablet_mgr_test.cpp | 79 +++++++++++++++++++++++++++++++- 4 files changed, 127 insertions(+), 8 deletions(-) diff --git a/be/src/common/config.cpp b/be/src/common/config.cpp index 95a3e61fb5517a..083b9f06c9491d 100644 --- a/be/src/common/config.cpp +++ b/be/src/common/config.cpp @@ -1404,6 +1404,8 @@ DEFINE_Bool(enable_table_size_correctness_check, "false"); DEFINE_Bool(force_regenerate_rowsetid_on_start_error, "false"); DEFINE_mBool(enable_sleep_between_delete_cumu_compaction, "false"); +DEFINE_mInt32(compaction_num_per_round, "1"); + // clang-format off #ifdef BE_TEST // test s3 diff --git a/be/src/common/config.h b/be/src/common/config.h index f8a9c3f7480b33..1e3d57ff763417 100644 --- a/be/src/common/config.h +++ b/be/src/common/config.h @@ -1490,6 +1490,8 @@ DECLARE_Bool(enable_table_size_correctness_check); // Enable sleep 5s between delete cumulative compaction. 
DECLARE_mBool(enable_sleep_between_delete_cumu_compaction); +DECLARE_mInt32(compaction_num_per_round); + #ifdef BE_TEST // test s3 DECLARE_String(test_s3_resource); diff --git a/be/src/olap/tablet_manager.cpp b/be/src/olap/tablet_manager.cpp index 33fee7ca350900..44c26d160eb8bc 100644 --- a/be/src/olap/tablet_manager.cpp +++ b/be/src/olap/tablet_manager.cpp @@ -719,6 +719,11 @@ void TabletManager::get_tablet_stat(TTabletStatResult* result) { result->__set_tablet_stat_list(*local_cache); } +struct TabletScore { + TabletSharedPtr tablet_ptr; + int score; +}; + std::vector TabletManager::find_best_tablets_to_compaction( CompactionType compaction_type, DataDir* data_dir, const std::unordered_set& tablet_submitted_compaction, uint32_t* score, @@ -732,6 +737,9 @@ std::vector TabletManager::find_best_tablets_to_compaction( uint32_t single_compact_highest_score = 0; TabletSharedPtr best_tablet; TabletSharedPtr best_single_compact_tablet; + auto cmp = [](TabletScore left, TabletScore right) { return left.score > right.score; }; + std::priority_queue, decltype(cmp)> top_tablets(cmp); + auto handler = [&](const TabletSharedPtr& tablet_ptr) { if (tablet_ptr->tablet_meta()->tablet_schema()->disable_auto_compaction()) { LOG_EVERY_N(INFO, 500) << "Tablet " << tablet_ptr->tablet_id() @@ -798,13 +806,33 @@ std::vector TabletManager::find_best_tablets_to_compaction( } } - // tablet should do cumu or base compaction - if (current_compaction_score > highest_score && !tablet_ptr->should_fetch_from_peer()) { - bool ret = tablet_ptr->suitable_for_compaction(compaction_type, - cumulative_compaction_policy); - if (ret) { - highest_score = current_compaction_score; - best_tablet = tablet_ptr; + if (config::compaction_num_per_round > 1 && !tablet_ptr->should_fetch_from_peer()) { + TabletScore ts; + ts.score = current_compaction_score; + ts.tablet_ptr = tablet_ptr; + if ((top_tablets.size() >= config::compaction_num_per_round && + current_compaction_score > top_tablets.top().score) || + 
top_tablets.size() < config::compaction_num_per_round) { + bool ret = tablet_ptr->suitable_for_compaction(compaction_type, + cumulative_compaction_policy); + if (ret) { + top_tablets.push(ts); + if (top_tablets.size() > config::compaction_num_per_round) { + top_tablets.pop(); + } + if (current_compaction_score > highest_score) { + highest_score = current_compaction_score; + } + } + } + } else { + if (current_compaction_score > highest_score && !tablet_ptr->should_fetch_from_peer()) { + bool ret = tablet_ptr->suitable_for_compaction(compaction_type, + cumulative_compaction_policy); + if (ret) { + highest_score = current_compaction_score; + best_tablet = tablet_ptr; + } } } }; @@ -820,6 +848,16 @@ std::vector TabletManager::find_best_tablets_to_compaction( picked_tablet.emplace_back(std::move(best_tablet)); } + std::vector reverse_top_tablets; + while (!top_tablets.empty()) { + reverse_top_tablets.emplace_back(top_tablets.top().tablet_ptr); + top_tablets.pop(); + } + + for (auto it = reverse_top_tablets.rbegin(); it != reverse_top_tablets.rend(); ++it) { + picked_tablet.emplace_back(*it); + } + // pick single compaction tablet needs the highest score if (best_single_compact_tablet != nullptr && single_compact_highest_score >= highest_score) { VLOG_CRITICAL << "Found the best tablet for single compaction. 
" diff --git a/be/test/olap/tablet_mgr_test.cpp b/be/test/olap/tablet_mgr_test.cpp index 1bcdcdf45c6906..a2551543405328 100644 --- a/be/test/olap/tablet_mgr_test.cpp +++ b/be/test/olap/tablet_mgr_test.cpp @@ -83,6 +83,7 @@ class TabletMgrTest : public testing::Test { SAFE_DELETE(_data_dir); EXPECT_TRUE(io::global_local_filesystem()->delete_directory(_engine_data_path).ok()); _tablet_mgr = nullptr; + config::compaction_num_per_round = 1; } std::unique_ptr k_engine; @@ -463,11 +464,87 @@ TEST_F(TabletMgrTest, FindTabletWithCompact) { ASSERT_EQ(score, 25); // drop all tablets - for (int64_t id = 1; id <= 20; ++id) { + for (int64_t id = 1; id <= 21; ++id) { Status drop_st = _tablet_mgr->drop_tablet(id, id * 10, false); ASSERT_TRUE(drop_st.ok()) << drop_st; } + { + config::compaction_num_per_round = 10; + for (int64_t i = 1; i <= 100; ++i) { + create_tablet(10000 + i, false, i); + } + + compact_tablets = _tablet_mgr->find_best_tablets_to_compaction( + CompactionType::CUMULATIVE_COMPACTION, _data_dir, cumu_set, &score, + cumulative_compaction_policies); + ASSERT_EQ(compact_tablets.size(), 10); + int index = 0; + for (auto t : compact_tablets) { + ASSERT_EQ(t->tablet_id(), 10100 - index); + ASSERT_EQ(t->calc_compaction_score(), 100 - index); + index++; + } + config::compaction_num_per_round = 1; + // drop all tablets + for (int64_t id = 10001; id <= 10100; ++id) { + Status drop_st = _tablet_mgr->drop_tablet(id, id * 10, false); + ASSERT_TRUE(drop_st.ok()) << drop_st; + } + } + + { + config::compaction_num_per_round = 10; + for (int64_t i = 1; i <= 100; ++i) { + create_tablet(20000 + i, false, i); + } + create_tablet(20102, true, 200); + + compact_tablets = _tablet_mgr->find_best_tablets_to_compaction( + CompactionType::CUMULATIVE_COMPACTION, _data_dir, cumu_set, &score, + cumulative_compaction_policies); + ASSERT_EQ(compact_tablets.size(), 11); + for (int i = 0; i < 10; ++i) { + ASSERT_EQ(compact_tablets[i]->tablet_id(), 20100 - i); + 
ASSERT_EQ(compact_tablets[i]->calc_compaction_score(), 100 - i); + } + ASSERT_EQ(compact_tablets[10]->tablet_id(), 20102); + ASSERT_EQ(compact_tablets[10]->calc_compaction_score(), 200); + + config::compaction_num_per_round = 1; + // drop all tablets + for (int64_t id = 20001; id <= 20100; ++id) { + Status drop_st = _tablet_mgr->drop_tablet(id, id * 10, false); + ASSERT_TRUE(drop_st.ok()) << drop_st; + } + + Status drop_st = _tablet_mgr->drop_tablet(20102, 20102 * 10, false); + ASSERT_TRUE(drop_st.ok()) << drop_st; + } + + { + config::compaction_num_per_round = 10; + for (int64_t i = 1; i <= 5; ++i) { + create_tablet(30000 + i, false, i + 5); + } + + compact_tablets = _tablet_mgr->find_best_tablets_to_compaction( + CompactionType::CUMULATIVE_COMPACTION, _data_dir, cumu_set, &score, + cumulative_compaction_policies); + ASSERT_EQ(compact_tablets.size(), 5); + for (int i = 0; i < 5; ++i) { + ASSERT_EQ(compact_tablets[i]->tablet_id(), 30000 + 5 - i); + ASSERT_EQ(compact_tablets[i]->calc_compaction_score(), 10 - i); + } + + config::compaction_num_per_round = 1; + // drop all tablets + for (int64_t id = 30001; id <= 30005; ++id) { + Status drop_st = _tablet_mgr->drop_tablet(id, id * 10, false); + ASSERT_TRUE(drop_st.ok()) << drop_st; + } + } + Status trash_st = _tablet_mgr->start_trash_sweep(); ASSERT_TRUE(trash_st.ok()) << trash_st; } From d38970bf82da2d9d3dd91ad21d96d66be6c5a147 Mon Sep 17 00:00:00 2001 From: wuwenchi Date: Fri, 20 Dec 2024 09:43:16 +0800 Subject: [PATCH 18/82] [opt](fs)add local file support (#45632) ### What problem does this PR solve? Problem Summary: Added support for local files to facilitate debugging of some local files in the data lake. 
--- .../apache/doris/datasource/property/PropertyConverter.java | 3 +++ .../src/main/java/org/apache/doris/fs/FileSystemFactory.java | 1 + 2 files changed, 4 insertions(+) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java index 8544ae597f1cd3..7b65411aa6b484 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/property/PropertyConverter.java @@ -41,6 +41,7 @@ import com.google.common.collect.Maps; import org.apache.hadoop.fs.CosFileSystem; import org.apache.hadoop.fs.CosNConfigKeys; +import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem; import org.apache.hadoop.fs.obs.OBSConstants; import org.apache.hadoop.fs.obs.OBSFileSystem; @@ -193,6 +194,8 @@ private static Map convertToOBSProperties(Map pr public static String getHadoopFSImplByScheme(String fsScheme) { if (fsScheme.equalsIgnoreCase("obs")) { return OBSFileSystem.class.getName(); + } else if (fsScheme.equalsIgnoreCase("file")) { + return LocalFileSystem.class.getName(); } else if (fsScheme.equalsIgnoreCase("oss")) { return AliyunOSSFileSystem.class.getName(); } else if (fsScheme.equalsIgnoreCase("cosn") || fsScheme.equalsIgnoreCase("lakefs")) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/fs/FileSystemFactory.java b/fe/fe-core/src/main/java/org/apache/doris/fs/FileSystemFactory.java index 1f3d60d2adf2eb..fb23005f4ac9ac 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/fs/FileSystemFactory.java +++ b/fe/fe-core/src/main/java/org/apache/doris/fs/FileSystemFactory.java @@ -63,6 +63,7 @@ public static RemoteFileSystem getRemoteFileSystem(FileSystemType type, Map Date: Fri, 20 Dec 2024 09:45:05 +0800 Subject: [PATCH 19/82] [fix](iceberg) Fill in the detailed error information (#45415) ### What problem does this PR 
solve? Related PR: #45285 Problem Summary: When dropping a database, fill in the detailed error information. --- .../doris/datasource/iceberg/IcebergMetadataOps.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java index 440a671afe58f1..da61b2ac1ab7fe 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataOps.java @@ -104,7 +104,7 @@ public List listDatabaseNames() { .map(n -> n.level(n.length() - 1)) .collect(Collectors.toList())); } catch (Exception e) { - throw new RuntimeException("Failed to list database names, error message is: " + e.getMessage()); + throw new RuntimeException("Failed to list database names, error message is:" + e.getMessage(), e); } } @@ -125,7 +125,7 @@ public void createDb(CreateDbStmt stmt) throws DdlException { }); } catch (Exception e) { throw new DdlException("Failed to create database: " - + stmt.getFullDbName() + " ,error message is: " + e.getMessage()); + + stmt.getFullDbName() + ", error message is:" + e.getMessage(), e); } } @@ -161,7 +161,7 @@ public void dropDb(DropDbStmt stmt) throws DdlException { }); } catch (Exception e) { throw new DdlException( - "Failed to drop database: " + stmt.getDbName() + ", error message is: " + e.getMessage(), e); + "Failed to drop database: " + stmt.getDbName() + ", error message is:" + e.getMessage(), e); } } @@ -184,7 +184,8 @@ public boolean createTable(CreateTableStmt stmt) throws UserException { try { preExecutionAuthenticator.execute(() -> performCreateTable(stmt)); } catch (Exception e) { - throw new DdlException("Failed to create table: " + stmt.getTableName() + " ,error message is:", e); + throw new DdlException( + "Failed to create table: " + stmt.getTableName() + ", 
error message is:" + e.getMessage(), e); } return false; } @@ -228,7 +229,8 @@ public void dropTable(DropTableStmt stmt) throws DdlException { return null; }); } catch (Exception e) { - throw new DdlException("Failed to drop table: " + stmt.getTableName() + " ,error message is:", e); + throw new DdlException( + "Failed to drop table: " + stmt.getTableName() + ", error message is:" + e.getMessage(), e); } } From b515f86669bc118cc03cf9f8332b6397194dc337 Mon Sep 17 00:00:00 2001 From: lw112 <131352377+felixwluo@users.noreply.github.com> Date: Fri, 20 Dec 2024 10:38:03 +0800 Subject: [PATCH 20/82] [fix](fold) fixed an issue with be computing constants (#43410) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What problem does this PR solve? issue close: #43061 1、Problem When enable_fold_constant_by_be=true is set,the results of between below queries are inconsistent select hex(from_base64('wr2JEDVXzL9+2XtRhgIloA==')) +----------------------------------------------+ | hex(from_base64('wr2JEDVXzL9+2XtRhgIloA==')) | +----------------------------------------------+ | C2BD89103557CCBF7ED97B51860225A0 | +----------------------------------------------+ select hex(s) from (select from_base64('wr2JEDVXzL9+2XtRhgIloA==') as s) t +--------------------------------------------------+ | hex(s) | +--------------------------------------------------+ | C2BDEFBFBD103557CCBF7EEFBFBD7B51EFBFBD0225EFBFBD | +--------------------------------------------------+ 2、mysql results select hex(s) from (select from_base64('wr2JEDVXzL9+2XtRhgIloA==') as s) t; +----------------------------------+ | hex(s) | +----------------------------------+ | C2BD89103557CCBF7ED97B51860225A0 | +----------------------------------+ 3、cause When processing binary data such as FromBase64, BE will return the original binary data through the bytesValue field, and the previous code only uses the stringValue field, resulting in the binary data being corrupted during the string 
encoding conversion process --- .../rules/expression/rules/FoldConstantRuleOnBE.java | 12 ++++++++++-- .../doris/nereids/trees/expressions/LiteralTest.java | 8 ++++++-- .../expression/fold_constant/fold_constant_by_be.out | 6 ++++++ .../fold_constant/fold_constant_by_be.groovy | 11 +++++++---- 4 files changed, 29 insertions(+), 8 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnBE.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnBE.java index 70e63b050a8402..dd79de70e26adb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnBE.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnBE.java @@ -487,8 +487,16 @@ public static List getResultExpression(DataType type, PValues resultCon } else if (type.isStringLikeType()) { int num = resultContent.getStringValueCount(); for (int i = 0; i < num; ++i) { - Literal literal = new StringLiteral(resultContent.getStringValue(i)); - res.add(literal); + // get the raw byte data to avoid character encoding conversion problems + ByteString bytesValues = resultContent.getBytesValue(i); + // use UTF-8 encoding to ensure proper handling of binary data + String stringValue = bytesValues.toStringUtf8(); + // handle special NULL value cases + if ("\\N".equalsIgnoreCase(stringValue) && resultContent.hasHasNull()) { + res.add(new NullLiteral(type)); + } else { + res.add(new StringLiteral(stringValue)); + } } } else if (type.isArrayType()) { ArrayType arrayType = (ArrayType) type; diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/LiteralTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/LiteralTest.java index fcb64ff0bface9..9c7e2e5b1519b5 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/LiteralTest.java +++ 
b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/LiteralTest.java @@ -233,7 +233,9 @@ public void testGetResultExpressionStruct() { PValues.Builder resultContentBuilder = PValues.newBuilder(); for (int i = 0; i < elementsArray.length; i = i + 2) { childBuilder1.addInt32Value(elementsArray[i]); - childBuilder2.addStringValue("str" + (i + 1)); + String strValue = "str" + (i + 1); + childBuilder2.addStringValue(strValue); + childBuilder2.addBytesValue(com.google.protobuf.ByteString.copyFromUtf8(strValue)); } childBuilder1.setType(childTypeBuilder1.build()); childBuilder2.setType(childTypeBuilder2.build()); @@ -280,7 +282,9 @@ public void testGetResultExpressionStructArray() { PValues.Builder resultContentBuilder = PValues.newBuilder(); for (int i = 0; i < elementsArray.length; i = i + 2) { childBuilder1.addInt32Value(elementsArray[i]); - childBuilder2.addStringValue("str" + (i + 1)); + String strValue = "str" + (i + 1); + childBuilder2.addStringValue(strValue); + childBuilder2.addBytesValue(com.google.protobuf.ByteString.copyFromUtf8(strValue)); } childBuilder1.setType(childTypeBuilder1.build()); childBuilder2.setType(childTypeBuilder2.build()); diff --git a/regression-test/data/nereids_p0/expression/fold_constant/fold_constant_by_be.out b/regression-test/data/nereids_p0/expression/fold_constant/fold_constant_by_be.out index c7c506292a5423..8d9d704684ea7c 100644 --- a/regression-test/data/nereids_p0/expression/fold_constant/fold_constant_by_be.out +++ b/regression-test/data/nereids_p0/expression/fold_constant/fold_constant_by_be.out @@ -1,4 +1,10 @@ -- This file is automatically generated. 
You should know what you did if you want to edit this +-- !sql -- +C2BD89103557CCBF7ED97B51860225A0 + +-- !sql -- +C2BD89103557CCBF7ED97B51860225A0 + -- !sql_1 -- 80000 diff --git a/regression-test/suites/nereids_p0/expression/fold_constant/fold_constant_by_be.groovy b/regression-test/suites/nereids_p0/expression/fold_constant/fold_constant_by_be.groovy index 09a80209c04ede..f3b1b0cdcd5b16 100644 --- a/regression-test/suites/nereids_p0/expression/fold_constant/fold_constant_by_be.groovy +++ b/regression-test/suites/nereids_p0/expression/fold_constant/fold_constant_by_be.groovy @@ -22,6 +22,9 @@ suite("fold_constant_by_be") { sql 'set enable_fallback_to_original_planner=false' sql 'set enable_fold_constant_by_be=true' + qt_sql """ select hex(from_base64('wr2JEDVXzL9+2XtRhgIloA==')); """ + qt_sql """ select hex(s) from (select from_base64('wr2JEDVXzL9+2XtRhgIloA==') as s) t; """ + test { sql ''' select if( @@ -32,8 +35,8 @@ suite("fold_constant_by_be") { result([['9999-07-31']]) } - sql """ - CREATE TABLE IF NOT EXISTS str_tb (k1 VARCHAR(10) NULL, v1 STRING NULL) + sql """ + CREATE TABLE IF NOT EXISTS str_tb (k1 VARCHAR(10) NULL, v1 STRING NULL) UNIQUE KEY(k1) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1"); """ @@ -53,7 +56,7 @@ suite("fold_constant_by_be") { sql 'set query_timeout=12;' qt_sql "select sleep(sign(1)*5);" - + explain { sql("verbose select substring('123456', 1, 3)") contains "varchar(3)" @@ -71,7 +74,7 @@ suite("fold_constant_by_be") { col_varchar_1000__undef_signed varchar(1000) null , col_varchar_1000__undef_signed_not_null varchar(1000) not null , col_varchar_1001__undef_signed varchar(1001) null , - col_varchar_1001__undef_signed_not_null varchar(1001) not null + col_varchar_1001__undef_signed_not_null varchar(1001) not null ) engine=olap DUPLICATE KEY(pk, col_char_255__undef_signed, col_char_100__undef_signed) distributed by hash(pk) buckets 10 From 62ff850cfe7fb55effb7a3af81e803a1a1a7a906 Mon Sep 17 00:00:00 2001 From: 
morrySnow Date: Fri, 20 Dec 2024 12:03:36 +0800 Subject: [PATCH 21/82] [fix](variable) force update variable not work as expected (#45648) ### What problem does this PR solve? Related PR: #41607 Problem Summary: should not reset variable version when replay edit log --- fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java | 2 +- .../src/main/java/org/apache/doris/qe/VariableMgr.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index b3544b4de55460..cc7b8846b1fb44 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -2647,7 +2647,7 @@ public long saveGlobalVariable(CountingDataOutputStream dos, long checksum) thro } public void replayGlobalVariableV2(GlobalVarPersistInfo info) throws IOException, DdlException { - VariableMgr.replayGlobalVariableV2(info); + VariableMgr.replayGlobalVariableV2(info, false); } public long saveLoadJobsV2(CountingDataOutputStream dos, long checksum) throws IOException { diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/VariableMgr.java b/fe/fe-core/src/main/java/org/apache/doris/qe/VariableMgr.java index 5b632c1632e968..82486fb72c759c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/VariableMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/VariableMgr.java @@ -459,21 +459,21 @@ public static void read(DataInputStream in) throws IOException, DdlException { } variablesToRead.readFields(in); GlobalVarPersistInfo info = GlobalVarPersistInfo.read(in); - replayGlobalVariableV2(info); + replayGlobalVariableV2(info, true); } finally { wlock.unlock(); } } // this method is used to replace the `replayGlobalVariable()` - public static void replayGlobalVariableV2(GlobalVarPersistInfo info) throws DdlException { + public static void replayGlobalVariableV2(GlobalVarPersistInfo info, 
boolean fromImage) throws DdlException { wlock.lock(); try { String json = info.getPersistJsonString(); JSONObject root = (JSONObject) JSONValue.parse(json); // if not variable version, we set it to 0 to ensure we could force set global variable. boolean hasVariableVersion = root.containsKey(GlobalVariable.VARIABLE_VERSION); - if (!hasVariableVersion) { + if (fromImage && !hasVariableVersion) { GlobalVariable.variableVersion = GlobalVariable.VARIABLE_VERSION_0; } for (Object varName : root.keySet()) { From 012acf58a7fde1ca0117fb2b88a0d6a22b9a1447 Mon Sep 17 00:00:00 2001 From: lihangyu Date: Fri, 20 Dec 2024 12:16:00 +0800 Subject: [PATCH 22/82] [improve](variant) only sanitize in debug mode (#45689) 1. could improve serialization performance 2. check is incorrect, example int8 and int16 could be compatible --- be/src/vec/columns/column_object.cpp | 2 ++ be/src/vec/columns/column_object.h | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/be/src/vec/columns/column_object.cpp b/be/src/vec/columns/column_object.cpp index d67a70d2f630f0..3d6a3e44436d29 100644 --- a/be/src/vec/columns/column_object.cpp +++ b/be/src/vec/columns/column_object.cpp @@ -1953,6 +1953,7 @@ std::string ColumnObject::debug_string() const { } Status ColumnObject::sanitize() const { +#ifndef NDEBUG RETURN_IF_CATCH_EXCEPTION(check_consistency()); for (const auto& subcolumn : subcolumns) { if (subcolumn->data.is_finalized()) { @@ -1967,6 +1968,7 @@ Status ColumnObject::sanitize() const { } VLOG_DEBUG << "sanitized " << debug_string(); +#endif return Status::OK(); } diff --git a/be/src/vec/columns/column_object.h b/be/src/vec/columns/column_object.h index e4127197a22b02..037656508e2a7f 100644 --- a/be/src/vec/columns/column_object.h +++ b/be/src/vec/columns/column_object.h @@ -428,7 +428,7 @@ class ColumnObject final : public COWHelper { bool empty() const; - // Check if all columns and types are aligned + // Check if all columns and types are aligned, only in debug mode Status 
sanitize() const; std::string debug_string() const; From a15e3e549af1aa125e9e5317214c26230f51c415 Mon Sep 17 00:00:00 2001 From: walter Date: Fri, 20 Dec 2024 12:34:24 +0800 Subject: [PATCH 23/82] [fix](catelog) Unifies partition items string (#45669) ### What problem does this PR solve? Issue Number: close #xxx Related PR: #xxx Problem Summary: For range partitions, `getItems().toString()` is equal to `getItemsString`, but for list partitions, there has a `,` between each item. The upsert record of binlog is generated via `getItemsString`, but the getMeta method fetches partition items string via `getItems().toString()`, which are different in the list partitions, and the ccr-syncer is unable to identify them. This PR unifies all partition items string via `getItemsString`. --- .../src/main/java/org/apache/doris/catalog/Env.java | 6 +----- .../org/apache/doris/catalog/ListPartitionItem.java | 12 ++++++------ .../org/apache/doris/catalog/RangePartitionItem.java | 12 ++++++------ .../doris/common/proc/EsPartitionsProcDir.java | 2 +- .../apache/doris/common/proc/PartitionsProcDir.java | 2 +- 5 files changed, 15 insertions(+), 19 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java index cc7b8846b1fb44..1c6345613d768d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/Env.java @@ -6486,11 +6486,7 @@ private static void getTableMeta(OlapTable olapTable, TGetMetaDBMeta dbMeta) { long partitionId = partition.getId(); partitionMeta.setId(partitionId); partitionMeta.setName(partition.getName()); - String partitionRange = ""; - if (tblPartitionInfo.getType() == PartitionType.RANGE - || tblPartitionInfo.getType() == PartitionType.LIST) { - partitionRange = tblPartitionInfo.getItem(partitionId).getItems().toString(); - } + String partitionRange = tblPartitionInfo.getPartitionRangeString(partitionId); 
partitionMeta.setRange(partitionRange); partitionMeta.setVisibleVersion(partition.getVisibleVersion()); // partitionMeta.setTemp(partition.isTemp()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/ListPartitionItem.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/ListPartitionItem.java index dba109a9539876..98585381244325 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/ListPartitionItem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/ListPartitionItem.java @@ -61,7 +61,12 @@ public List getItems() { } public String getItemsString() { - return toString(); + // ATTN: DO NOT EDIT unless unless you explicitly guarantee compatibility + // between different versions. + // + // the ccr syncer depends on this string to identify partitions between two + // clusters (cluster versions may be different). + return getItems().toString(); } public String getItemsSql() { @@ -173,11 +178,6 @@ public int hashCode() { @Override public String toString() { - // ATTN: DO NOT EDIT unless unless you explicitly guarantee compatibility - // between different versions. - // - // the ccr syncer depends on this string to identify partitions between two - // clusters (cluster versions may be different). StringBuilder builder = new StringBuilder(); builder.append("partitionKeys: ["); for (PartitionKey partitionKey : partitionKeys) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java index 690ab88991bd16..96bf0097c28a51 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java @@ -46,7 +46,12 @@ public Range getItems() { } public String getItemsString() { - return toString(); + // ATTN: DO NOT EDIT unless unless you explicitly guarantee compatibility + // between different versions. 
+ // + // the ccr syncer depends on this string to identify partitions between two + // clusters (cluster versions may be different). + return partitionKeyRange.toString(); } public String getItemsSql() { @@ -125,11 +130,6 @@ public boolean equals(Object obj) { @Override public String toString() { - // ATTN: DO NOT EDIT unless unless you explicitly guarantee compatibility - // between different versions. - // - // the ccr syncer depends on this string to identify partitions between two - // clusters (cluster versions may be different). return partitionKeyRange.toString(); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/EsPartitionsProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/EsPartitionsProcDir.java index 87e7fa449f13a2..ed5cfc18d13e48 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/EsPartitionsProcDir.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/EsPartitionsProcDir.java @@ -93,7 +93,7 @@ public ProcResult fetchResult() throws AnalysisException { } partitionInfo.add(joiner.join(colNames)); // partition key partitionInfo.add( - rangePartitionInfo.getItem(esShardPartitions.getPartitionId()).getItems().toString()); // range + rangePartitionInfo.getItem(esShardPartitions.getPartitionId()).getItemsString()); // range partitionInfo.add("-"); // dis partitionInfo.add(esShardPartitions.getShardRoutings().size()); // shards partitionInfo.add(1); // replica num diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java b/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java index 3c44874cb7deff..3ac8b797d64ad6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/proc/PartitionsProcDir.java @@ -323,7 +323,7 @@ private List, TRow>> getPartitionInfosInrernal() throws An String colNamesStr = joiner.join(colNames); partitionInfo.add(colNamesStr); 
trow.addToColumnValue(new TCell().setStringVal(colNamesStr)); - String itemStr = tblPartitionInfo.getItem(partitionId).getItems().toString(); + String itemStr = tblPartitionInfo.getPartitionRangeString(partitionId); partitionInfo.add(itemStr); trow.addToColumnValue(new TCell().setStringVal(itemStr)); } else { From 8956279255c7a80ef319d68d65bd0f3e0fcd8c7d Mon Sep 17 00:00:00 2001 From: yagagagaga Date: Fri, 20 Dec 2024 12:38:41 +0800 Subject: [PATCH 24/82] [chore](script) fix `start_fe.sh --version` not work and MetaService scripts occur error in Debian GNU/Linux 11 (bullseye) (#45610) 1. fix `start_fe.sh --version` not work 2. fix `ms/bin/start.sh` could not work in Debian GNU/Linux 11 (bullseye) --- bin/start_fe.sh | 6 ++++++ cloud/script/start.sh | 2 +- cloud/script/stop.sh | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/bin/start_fe.sh b/bin/start_fe.sh index ac5971072c306c..b089596a9cdb73 100755 --- a/bin/start_fe.sh +++ b/bin/start_fe.sh @@ -258,6 +258,12 @@ if [[ "${HELPER}" != "" ]]; then HELPER="-helper ${HELPER}" fi +if [[ "${OPT_VERSION}" != "" ]]; then + export DORIS_LOG_TO_STDERR=1 + ${LIMIT:+${LIMIT}} "${JAVA}" org.apache.doris.DorisFE --version + exit 0 +fi + if [[ "${IMAGE_TOOL}" -eq 1 ]]; then if [[ -n "${IMAGE_PATH}" ]]; then ${LIMIT:+${LIMIT}} "${JAVA}" ${final_java_opt:+${final_java_opt}} ${coverage_opt:+${coverage_opt}} org.apache.doris.DorisFE -i "${IMAGE_PATH}" diff --git a/cloud/script/start.sh b/cloud/script/start.sh index 1bce9813f4cf52..ecb5a3b2bed1e6 100644 --- a/cloud/script/start.sh +++ b/cloud/script/start.sh @@ -1,4 +1,4 @@ -#!/usr/bin/bash +#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file # distributed with this work for additional information diff --git a/cloud/script/stop.sh b/cloud/script/stop.sh index 48f01c545ae840..1b59cef718243e 100644 --- a/cloud/script/stop.sh +++ b/cloud/script/stop.sh @@ -1,4 +1,4 @@ -#!/usr/bin/bash +#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information From 2a1209d3cc77dac4f3ee7073240cd354bd6575c8 Mon Sep 17 00:00:00 2001 From: "Mingyu Chen (Rayner)" Date: Fri, 20 Dec 2024 13:33:25 +0800 Subject: [PATCH 25/82] [opt](catalog) cache the Configuration object (#45433) ### What problem does this PR solve? Problem Summary: Creating Configuration object is very costly, so we cache it for better performance --- .../doris/datasource/ExternalCatalog.java | 21 +++++++++++++++++++ .../hive/HiveMetaStoreClientHelper.java | 7 +------ 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index d7cbee18c74c7a..2575169f79207f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -153,6 +153,9 @@ public abstract class ExternalCatalog protected MetaCache> metaCache; protected PreExecutionAuthenticator preExecutionAuthenticator; + private volatile Configuration cachedConf = null; + private final byte[] confLock = new byte[0]; + public ExternalCatalog() { } @@ -164,6 +167,20 @@ public ExternalCatalog(long catalogId, String name, InitCatalogLog.Type logType, } public Configuration getConfiguration() { + // build configuration is costly, so we cache it. 
+ if (cachedConf != null) { + return cachedConf; + } + synchronized (confLock) { + if (cachedConf != null) { + return cachedConf; + } + cachedConf = buildConf(); + return cachedConf; + } + } + + private Configuration buildConf() { Configuration conf = DFSFileSystem.getHdfsConf(ifNotSetFallbackToSimpleAuth()); Map catalogProperties = catalogProperty.getHadoopProperties(); for (Map.Entry entry : catalogProperties.entrySet()) { @@ -409,6 +426,10 @@ public void onRefresh(boolean invalidCache) { this.convertedProperties = null; } + synchronized (this.confLock) { + this.cachedConf = null; + } + refreshOnlyCatalogCache(invalidCache); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java index 884cfbee45ba9f..706bd653a85e21 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java @@ -42,7 +42,6 @@ import org.apache.doris.common.security.authentication.AuthenticationConfig; import org.apache.doris.common.security.authentication.HadoopAuthenticator; import org.apache.doris.datasource.ExternalCatalog; -import org.apache.doris.fs.remote.dfs.DFSFileSystem; import org.apache.doris.thrift.TExprOpcode; import com.google.common.base.Strings; @@ -843,11 +842,7 @@ public static HoodieTableMetaClient getHudiClient(HMSExternalTable table) { } public static Configuration getConfiguration(HMSExternalTable table) { - Configuration conf = DFSFileSystem.getHdfsConf(table.getCatalog().ifNotSetFallbackToSimpleAuth()); - for (Map.Entry entry : table.getHadoopProperties().entrySet()) { - conf.set(entry.getKey(), entry.getValue()); - } - return conf; + return table.getCatalog().getConfiguration(); } public static Optional getSerdeProperty(Table table, String key) { From e3f3f470127091cec057eeb7c724206a5204fa3b Mon Sep 
17 00:00:00 2001 From: Pxl Date: Fri, 20 Dec 2024 14:19:31 +0800 Subject: [PATCH 26/82] [Chore](profile) add some profile on ReaderInit (#45556) ### What problem does this PR solve? add some profile on ReaderInit --- be/src/olap/base_tablet.cpp | 5 +- be/src/olap/olap_common.h | 24 ++++++++ be/src/olap/rowset/beta_rowset_reader.cpp | 21 ++++--- be/src/olap/rowset/segment_v2/segment.cpp | 26 ++++++--- be/src/olap/rowset/segment_v2/segment.h | 6 +- .../rowset/segment_v2/segment_iterator.cpp | 10 +++- be/src/olap/tablet_reader.cpp | 7 +++ be/src/pipeline/exec/olap_scan_operator.cpp | 41 +++++++++++++ be/src/pipeline/exec/olap_scan_operator.h | 27 +++++++++ be/src/vec/exec/scan/new_olap_scanner.cpp | 41 +++++++++++++ be/src/vec/olap/block_reader.cpp | 57 +++++++++++-------- be/src/vec/olap/vgeneric_iterators.cpp | 5 +- 12 files changed, 221 insertions(+), 49 deletions(-) diff --git a/be/src/olap/base_tablet.cpp b/be/src/olap/base_tablet.cpp index 82dc122e19f5ef..33275a2663b329 100644 --- a/be/src/olap/base_tablet.cpp +++ b/be/src/olap/base_tablet.cpp @@ -28,6 +28,7 @@ #include "common/status.h" #include "olap/calc_delete_bitmap_executor.h" #include "olap/delete_bitmap_calculator.h" +#include "olap/iterators.h" #include "olap/memtable.h" #include "olap/partial_update_info.h" #include "olap/primary_key_index.h" @@ -81,7 +82,9 @@ Status _get_segment_column_iterator(const BetaRowsetSharedPtr& rowset, uint32_t rowset->rowset_id().to_string(), segid)); } segment_v2::SegmentSharedPtr segment = *it; - RETURN_IF_ERROR(segment->new_column_iterator(target_column, column_iterator, nullptr)); + StorageReadOptions opts; + opts.stats = stats; + RETURN_IF_ERROR(segment->new_column_iterator(target_column, column_iterator, &opts)); segment_v2::ColumnIteratorOptions opt { .use_page_cache = !config::disable_storage_page_cache, .file_reader = segment->file_reader().get(), diff --git a/be/src/olap/olap_common.h b/be/src/olap/olap_common.h index 3b892e5d360e54..a83e6a6df63e1a 100644 --- 
a/be/src/olap/olap_common.h +++ b/be/src/olap/olap_common.h @@ -389,6 +389,30 @@ struct OlapReaderStatistics { int64_t collect_iterator_merge_next_timer = 0; int64_t collect_iterator_normal_next_timer = 0; int64_t delete_bitmap_get_agg_ns = 0; + + int64_t tablet_reader_init_timer_ns = 0; + int64_t tablet_reader_capture_rs_readers_timer_ns = 0; + int64_t tablet_reader_init_return_columns_timer_ns = 0; + int64_t tablet_reader_init_keys_param_timer_ns = 0; + int64_t tablet_reader_init_orderby_keys_param_timer_ns = 0; + int64_t tablet_reader_init_conditions_param_timer_ns = 0; + int64_t tablet_reader_init_delete_condition_param_timer_ns = 0; + int64_t block_reader_vcollect_iter_init_timer_ns = 0; + int64_t block_reader_rs_readers_init_timer_ns = 0; + int64_t block_reader_build_heap_init_timer_ns = 0; + + int64_t rowset_reader_get_segment_iterators_timer_ns = 0; + int64_t rowset_reader_create_iterators_timer_ns = 0; + int64_t rowset_reader_init_iterators_timer_ns = 0; + int64_t rowset_reader_load_segments_timer_ns = 0; + + int64_t segment_iterator_init_timer_ns = 0; + int64_t segment_iterator_init_return_column_iterators_timer_ns = 0; + int64_t segment_iterator_init_bitmap_index_iterators_timer_ns = 0; + int64_t segment_iterator_init_inverted_index_iterators_timer_ns = 0; + + int64_t segment_create_column_readers_timer_ns = 0; + int64_t segment_load_index_timer_ns = 0; }; using ColumnId = uint32_t; diff --git a/be/src/olap/rowset/beta_rowset_reader.cpp b/be/src/olap/rowset/beta_rowset_reader.cpp index 47cf9b820e8562..9a4d71587a02c1 100644 --- a/be/src/olap/rowset/beta_rowset_reader.cpp +++ b/be/src/olap/rowset/beta_rowset_reader.cpp @@ -78,7 +78,6 @@ bool BetaRowsetReader::update_profile(RuntimeProfile* profile) { Status BetaRowsetReader::get_segment_iterators(RowsetReaderContext* read_context, std::vector* out_iters, bool use_cache) { - RETURN_IF_ERROR(_rowset->load()); _read_context = read_context; // The segment iterator is created with its own statistics, // and the 
member variable '_stats' is initialized by '_stats(&owned_stats)'. @@ -92,6 +91,9 @@ Status BetaRowsetReader::get_segment_iterators(RowsetReaderContext* read_context if (_read_context->stats != nullptr) { _stats = _read_context->stats; } + SCOPED_RAW_TIMER(&_stats->rowset_reader_get_segment_iterators_timer_ns); + + RETURN_IF_ERROR(_rowset->load()); // convert RowsetReaderContext to StorageReadOptions _read_options.block_row_max = read_context->batch_size; @@ -225,9 +227,12 @@ Status BetaRowsetReader::get_segment_iterators(RowsetReaderContext* read_context bool should_use_cache = use_cache || (_read_context->reader_type == ReaderType::READER_QUERY && enable_segment_cache); SegmentCacheHandle segment_cache_handle; - RETURN_IF_ERROR(SegmentLoader::instance()->load_segments(_rowset, &segment_cache_handle, - should_use_cache, - /*need_load_pk_index_and_bf*/ false)); + { + SCOPED_RAW_TIMER(&_stats->rowset_reader_load_segments_timer_ns); + RETURN_IF_ERROR(SegmentLoader::instance()->load_segments( + _rowset, &segment_cache_handle, should_use_cache, + /*need_load_pk_index_and_bf*/ false)); + } // create iterator for each segment auto& segments = segment_cache_handle.get_segments(); @@ -253,6 +258,7 @@ Status BetaRowsetReader::get_segment_iterators(RowsetReaderContext* read_context const bool use_lazy_init_iterators = !is_merge_iterator && _read_context->reader_type == ReaderType::READER_QUERY; for (int i = seg_start; i < seg_end; i++) { + SCOPED_RAW_TIMER(&_stats->rowset_reader_create_iterators_timer_ns); auto& seg_ptr = segments[i]; std::unique_ptr iter; @@ -317,6 +323,8 @@ Status BetaRowsetReader::_init_iterator() { std::vector iterators; RETURN_IF_ERROR(get_segment_iterators(_read_context, &iterators)); + SCOPED_RAW_TIMER(&_stats->rowset_reader_init_iterators_timer_ns); + if (_read_context->merged_rows == nullptr) { _read_context->merged_rows = &_merged_rows; } @@ -352,8 +360,8 @@ Status BetaRowsetReader::_init_iterator() { } Status 
BetaRowsetReader::next_block(vectorized::Block* block) { - SCOPED_RAW_TIMER(&_stats->block_fetch_ns); RETURN_IF_ERROR(_init_iterator_once()); + SCOPED_RAW_TIMER(&_stats->block_fetch_ns); if (_empty) { return Status::Error("BetaRowsetReader is empty"); } @@ -381,9 +389,8 @@ Status BetaRowsetReader::next_block(vectorized::Block* block) { } Status BetaRowsetReader::next_block_view(vectorized::BlockView* block_view) { - SCOPED_RAW_TIMER(&_stats->block_fetch_ns); RETURN_IF_ERROR(_init_iterator_once()); - + SCOPED_RAW_TIMER(&_stats->block_fetch_ns); RuntimeState* runtime_state = nullptr; if (_read_context != nullptr) { runtime_state = _read_context->runtime_state; diff --git a/be/src/olap/rowset/segment_v2/segment.cpp b/be/src/olap/rowset/segment_v2/segment.cpp index 513c0be4f8cd14..d55d84901c2e66 100644 --- a/be/src/olap/rowset/segment_v2/segment.cpp +++ b/be/src/olap/rowset/segment_v2/segment.cpp @@ -228,7 +228,7 @@ Status Segment::new_iterator(SchemaSPtr schema, const StorageReadOptions& read_o if (read_options.runtime_state != nullptr) { _be_exec_version = read_options.runtime_state->be_exec_version(); } - RETURN_IF_ERROR(_create_column_readers_once()); + RETURN_IF_ERROR(_create_column_readers_once(read_options.stats)); read_options.stats->total_segment_number++; // trying to prune the current segment by segment-level zone map @@ -288,7 +288,11 @@ Status Segment::new_iterator(SchemaSPtr schema, const StorageReadOptions& read_o } } - RETURN_IF_ERROR(load_index()); + { + SCOPED_RAW_TIMER(&read_options.stats->segment_load_index_timer_ns); + RETURN_IF_ERROR(load_index()); + } + if (read_options.delete_condition_predicates->num_of_column_predicate() == 0 && read_options.push_down_agg_type_opt != TPushAggOp::NONE && read_options.push_down_agg_type_opt != TPushAggOp::COUNT_ON_INDEX) { @@ -594,7 +598,8 @@ vectorized::DataTypePtr Segment::get_data_type_of(const ColumnIdentifier& identi return nullptr; } -Status Segment::_create_column_readers_once() { +Status 
Segment::_create_column_readers_once(OlapReaderStatistics* stats) { + SCOPED_RAW_TIMER(&stats->segment_create_column_readers_timer_ns); return _create_column_readers_once_call.call([&] { DCHECK(_footer_pb); Defer defer([&]() { _footer_pb.reset(); }); @@ -868,10 +873,10 @@ Status Segment::new_column_iterator_with_path(const TabletColumn& tablet_column, Status Segment::new_column_iterator(const TabletColumn& tablet_column, std::unique_ptr* iter, const StorageReadOptions* opt) { - if (opt != nullptr && opt->runtime_state != nullptr) { + if (opt->runtime_state != nullptr) { _be_exec_version = opt->runtime_state->be_exec_version(); } - RETURN_IF_ERROR(_create_column_readers_once()); + RETURN_IF_ERROR(_create_column_readers_once(opt->stats)); // init column iterator by path info if (tablet_column.has_path_info() || tablet_column.is_variant_type()) { @@ -899,8 +904,9 @@ Status Segment::new_column_iterator(const TabletColumn& tablet_column, return Status::OK(); } -Status Segment::new_column_iterator(int32_t unique_id, std::unique_ptr* iter) { - RETURN_IF_ERROR(_create_column_readers_once()); +Status Segment::new_column_iterator(int32_t unique_id, const StorageReadOptions* opt, + std::unique_ptr* iter) { + RETURN_IF_ERROR(_create_column_readers_once(opt->stats)); ColumnIterator* it; RETURN_IF_ERROR(_column_readers.at(unique_id)->new_iterator(&it)); iter->reset(it); @@ -928,8 +934,9 @@ ColumnReader* Segment::_get_column_reader(const TabletColumn& col) { } Status Segment::new_bitmap_index_iterator(const TabletColumn& tablet_column, + const StorageReadOptions& read_options, std::unique_ptr* iter) { - RETURN_IF_ERROR(_create_column_readers_once()); + RETURN_IF_ERROR(_create_column_readers_once(read_options.stats)); ColumnReader* reader = _get_column_reader(tablet_column); if (reader != nullptr && reader->has_bitmap_index()) { BitmapIndexIterator* it; @@ -947,7 +954,7 @@ Status Segment::new_inverted_index_iterator(const TabletColumn& tablet_column, if (read_options.runtime_state 
!= nullptr) { _be_exec_version = read_options.runtime_state->be_exec_version(); } - RETURN_IF_ERROR(_create_column_readers_once()); + RETURN_IF_ERROR(_create_column_readers_once(read_options.stats)); ColumnReader* reader = _get_column_reader(tablet_column); if (reader != nullptr && index_meta) { if (_inverted_index_file_reader == nullptr) { @@ -1116,6 +1123,7 @@ Status Segment::seek_and_read_by_rowid(const TabletSchema& schema, SlotDescripto OlapReaderStatistics& stats, std::unique_ptr& iterator_hint) { StorageReadOptions storage_read_opt; + storage_read_opt.stats = &stats; storage_read_opt.io_ctx.reader_type = ReaderType::READER_QUERY; segment_v2::ColumnIteratorOptions opt { .use_page_cache = !config::disable_storage_page_cache, diff --git a/be/src/olap/rowset/segment_v2/segment.h b/be/src/olap/rowset/segment_v2/segment.h index 1b20c1f066bdf9..ca2fee0e77aa82 100644 --- a/be/src/olap/rowset/segment_v2/segment.h +++ b/be/src/olap/rowset/segment_v2/segment.h @@ -111,9 +111,11 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd std::unique_ptr* iter, const StorageReadOptions* opt); - Status new_column_iterator(int32_t unique_id, std::unique_ptr* iter); + Status new_column_iterator(int32_t unique_id, const StorageReadOptions* opt, + std::unique_ptr* iter); Status new_bitmap_index_iterator(const TabletColumn& tablet_column, + const StorageReadOptions& read_options, std::unique_ptr* iter); Status new_inverted_index_iterator(const TabletColumn& tablet_column, @@ -238,7 +240,7 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd Status _open_inverted_index(); - Status _create_column_readers_once(); + Status _create_column_readers_once(OlapReaderStatistics* stats); private: friend class SegmentIterator; diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp index abdf9116756f0e..0c54eaa2d6cbaa 100644 --- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp +++ 
b/be/src/olap/rowset/segment_v2/segment_iterator.cpp @@ -281,9 +281,10 @@ Status SegmentIterator::_init_impl(const StorageReadOptions& opts) { if (_inited) { return Status::OK(); } + _opts = opts; + SCOPED_RAW_TIMER(&_opts.stats->segment_iterator_init_timer_ns); _inited = true; _file_reader = _segment->_file_reader; - _opts = opts; _col_predicates.clear(); for (const auto& predicate : opts.column_predicates) { @@ -1005,6 +1006,7 @@ bool SegmentIterator::_check_all_conditions_passed_inverted_index_for_column(Col } Status SegmentIterator::_init_return_column_iterators() { + SCOPED_RAW_TIMER(&_opts.stats->segment_iterator_init_return_column_iterators_timer_ns); if (_cur_rowid >= num_rows()) { return Status::OK(); } @@ -1047,19 +1049,21 @@ Status SegmentIterator::_init_return_column_iterators() { } Status SegmentIterator::_init_bitmap_index_iterators() { + SCOPED_RAW_TIMER(&_opts.stats->segment_iterator_init_bitmap_index_iterators_timer_ns); if (_cur_rowid >= num_rows()) { return Status::OK(); } for (auto cid : _schema->column_ids()) { if (_bitmap_index_iterators[cid] == nullptr) { - RETURN_IF_ERROR(_segment->new_bitmap_index_iterator(_opts.tablet_schema->column(cid), - &_bitmap_index_iterators[cid])); + RETURN_IF_ERROR(_segment->new_bitmap_index_iterator( + _opts.tablet_schema->column(cid), _opts, &_bitmap_index_iterators[cid])); } } return Status::OK(); } Status SegmentIterator::_init_inverted_index_iterators() { + SCOPED_RAW_TIMER(&_opts.stats->segment_iterator_init_inverted_index_iterators_timer_ns); if (_cur_rowid >= num_rows()) { return Status::OK(); } diff --git a/be/src/olap/tablet_reader.cpp b/be/src/olap/tablet_reader.cpp index 17cab2a3c0c834..416d0fea476b32 100644 --- a/be/src/olap/tablet_reader.cpp +++ b/be/src/olap/tablet_reader.cpp @@ -120,6 +120,7 @@ TabletReader::~TabletReader() { } Status TabletReader::init(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_timer_ns); _predicate_arena = std::make_unique(); Status res = 
_init_params(read_params); @@ -159,6 +160,7 @@ bool TabletReader::_optimize_for_single_rowset( } Status TabletReader::_capture_rs_readers(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_capture_rs_readers_timer_ns); if (read_params.rs_splits.empty()) { return Status::InternalError("fail to acquire data sources. tablet={}", _tablet->tablet_id()); @@ -331,6 +333,7 @@ Status TabletReader::_init_params(const ReaderParams& read_params) { } Status TabletReader::_init_return_columns(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_return_columns_timer_ns); if (read_params.reader_type == ReaderType::READER_QUERY) { _return_columns = read_params.return_columns; _tablet_columns_convert_to_null_set = read_params.tablet_columns_convert_to_null_set; @@ -387,6 +390,7 @@ Status TabletReader::_init_return_columns(const ReaderParams& read_params) { } Status TabletReader::_init_keys_param(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_keys_param_timer_ns); if (read_params.start_key.empty()) { return Status::OK(); } @@ -461,6 +465,7 @@ Status TabletReader::_init_keys_param(const ReaderParams& read_params) { } Status TabletReader::_init_orderby_keys_param(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_orderby_keys_param_timer_ns); // UNIQUE_KEYS will compare all keys as before if (_tablet_schema->keys_type() == DUP_KEYS || (_tablet_schema->keys_type() == UNIQUE_KEYS && _tablet->enable_unique_key_merge_on_write())) { @@ -513,6 +518,7 @@ Status TabletReader::_init_orderby_keys_param(const ReaderParams& read_params) { } Status TabletReader::_init_conditions_param(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_conditions_param_timer_ns); std::vector predicates; for (const auto& condition : read_params.conditions) { TCondition tmp_cond = condition; @@ -639,6 +645,7 @@ ColumnPredicate* TabletReader::_parse_to_predicate(const 
FunctionFilter& functio } Status TabletReader::_init_delete_condition(const ReaderParams& read_params) { + SCOPED_RAW_TIMER(&_stats.tablet_reader_init_delete_condition_param_timer_ns); // If it's cumu and not allow do delete when cumu if (read_params.reader_type == ReaderType::READER_SEGMENT_COMPACTION || (read_params.reader_type == ReaderType::READER_CUMULATIVE_COMPACTION && diff --git a/be/src/pipeline/exec/olap_scan_operator.cpp b/be/src/pipeline/exec/olap_scan_operator.cpp index 34fa741ff1ec00..fa91caffa8ebc4 100644 --- a/be/src/pipeline/exec/olap_scan_operator.cpp +++ b/be/src/pipeline/exec/olap_scan_operator.cpp @@ -150,6 +150,47 @@ Status OlapScanLocalState::_init_profile() { _tablet_counter = ADD_COUNTER(_runtime_profile, "TabletNum", TUnit::UNIT); _key_range_counter = ADD_COUNTER(_runtime_profile, "KeyRangesNum", TUnit::UNIT); _runtime_filter_info = ADD_LABEL_COUNTER_WITH_LEVEL(_runtime_profile, "RuntimeFilterInfo", 1); + + _tablet_reader_init_timer = ADD_TIMER(_scanner_profile, "TabletReaderInitTimer"); + _tablet_reader_capture_rs_readers_timer = + ADD_TIMER(_scanner_profile, "TabletReaderCaptureRsReadersTimer"); + _tablet_reader_init_return_columns_timer = + ADD_TIMER(_scanner_profile, "TabletReaderInitReturnColumnsTimer"); + _tablet_reader_init_keys_param_timer = + ADD_TIMER(_scanner_profile, "TabletReaderInitKeysParamTimer"); + _tablet_reader_init_orderby_keys_param_timer = + ADD_TIMER(_scanner_profile, "TabletReaderInitOrderbyKeysParamTimer"); + _tablet_reader_init_conditions_param_timer = + ADD_TIMER(_scanner_profile, "TabletReaderInitConditionsParamTimer"); + _tablet_reader_init_delete_condition_param_timer = + ADD_TIMER(_scanner_profile, "TabletReaderInitDeleteConditionParamTimer"); + _block_reader_vcollect_iter_init_timer = + ADD_TIMER(_scanner_profile, "BlockReaderVcollectIterInitTimer"); + _block_reader_rs_readers_init_timer = + ADD_TIMER(_scanner_profile, "BlockReaderRsReadersInitTimer"); + _block_reader_build_heap_init_timer = + 
ADD_TIMER(_scanner_profile, "BlockReaderBuildHeapInitTimer"); + + _rowset_reader_get_segment_iterators_timer = + ADD_TIMER(_scanner_profile, "RowsetReaderGetSegmentIteratorsTimer"); + _rowset_reader_create_iterators_timer = + ADD_TIMER(_scanner_profile, "RowsetReaderCreateIteratorsTimer"); + _rowset_reader_init_iterators_timer = + ADD_TIMER(_scanner_profile, "RowsetReaderInitIteratorsTimer"); + _rowset_reader_load_segments_timer = + ADD_TIMER(_scanner_profile, "RowsetReaderLoadSegmentsTimer"); + + _segment_iterator_init_timer = ADD_TIMER(_scanner_profile, "SegmentIteratorInitTimer"); + _segment_iterator_init_return_column_iterators_timer = + ADD_TIMER(_scanner_profile, "SegmentIteratorInitReturnColumnIteratorsTimer"); + _segment_iterator_init_bitmap_index_iterators_timer = + ADD_TIMER(_scanner_profile, "SegmentIteratorInitBitmapIndexIteratorsTimer"); + _segment_iterator_init_inverted_index_iterators_timer = + ADD_TIMER(_scanner_profile, "SegmentIteratorInitInvertedIndexIteratorsTimer"); + + _segment_create_column_readers_timer = + ADD_TIMER(_scanner_profile, "SegmentCreateColumnReadersTimer"); + _segment_load_index_timer = ADD_TIMER(_scanner_profile, "SegmentLoadIndexTimer"); return Status::OK(); } diff --git a/be/src/pipeline/exec/olap_scan_operator.h b/be/src/pipeline/exec/olap_scan_operator.h index 91980d6a3f172b..0e8e7223d4b8c5 100644 --- a/be/src/pipeline/exec/olap_scan_operator.h +++ b/be/src/pipeline/exec/olap_scan_operator.h @@ -184,6 +184,33 @@ class OlapScanLocalState final : public ScanLocalState { RuntimeProfile::Counter* _runtime_filter_info = nullptr; + // timer about tablet reader + RuntimeProfile::Counter* _tablet_reader_init_timer = nullptr; + RuntimeProfile::Counter* _tablet_reader_capture_rs_readers_timer = nullptr; + RuntimeProfile::Counter* _tablet_reader_init_return_columns_timer = nullptr; + RuntimeProfile::Counter* _tablet_reader_init_keys_param_timer = nullptr; + RuntimeProfile::Counter* _tablet_reader_init_orderby_keys_param_timer = 
nullptr; + RuntimeProfile::Counter* _tablet_reader_init_conditions_param_timer = nullptr; + RuntimeProfile::Counter* _tablet_reader_init_delete_condition_param_timer = nullptr; + + // timer about block reader + RuntimeProfile::Counter* _block_reader_vcollect_iter_init_timer = nullptr; + RuntimeProfile::Counter* _block_reader_rs_readers_init_timer = nullptr; + RuntimeProfile::Counter* _block_reader_build_heap_init_timer = nullptr; + + RuntimeProfile::Counter* _rowset_reader_get_segment_iterators_timer = nullptr; + RuntimeProfile::Counter* _rowset_reader_create_iterators_timer = nullptr; + RuntimeProfile::Counter* _rowset_reader_init_iterators_timer = nullptr; + RuntimeProfile::Counter* _rowset_reader_load_segments_timer = nullptr; + + RuntimeProfile::Counter* _segment_iterator_init_timer = nullptr; + RuntimeProfile::Counter* _segment_iterator_init_return_column_iterators_timer = nullptr; + RuntimeProfile::Counter* _segment_iterator_init_bitmap_index_iterators_timer = nullptr; + RuntimeProfile::Counter* _segment_iterator_init_inverted_index_iterators_timer = nullptr; + + RuntimeProfile::Counter* _segment_create_column_readers_timer = nullptr; + RuntimeProfile::Counter* _segment_load_index_timer = nullptr; + std::mutex _profile_mtx; }; diff --git a/be/src/vec/exec/scan/new_olap_scanner.cpp b/be/src/vec/exec/scan/new_olap_scanner.cpp index 4c0b30e440ecf5..d3a05cbb3c2fe6 100644 --- a/be/src/vec/exec/scan/new_olap_scanner.cpp +++ b/be/src/vec/exec/scan/new_olap_scanner.cpp @@ -649,6 +649,47 @@ void NewOlapScanner::_collect_profile_before_close() { COUNTER_UPDATE(local_state->_filtered_segment_counter, stats.filtered_segment_number); COUNTER_UPDATE(local_state->_total_segment_counter, stats.total_segment_number); + COUNTER_UPDATE(local_state->_tablet_reader_init_timer, stats.tablet_reader_init_timer_ns); + COUNTER_UPDATE(local_state->_tablet_reader_capture_rs_readers_timer, + stats.tablet_reader_capture_rs_readers_timer_ns); + 
COUNTER_UPDATE(local_state->_tablet_reader_init_return_columns_timer, + stats.tablet_reader_init_return_columns_timer_ns); + COUNTER_UPDATE(local_state->_tablet_reader_init_keys_param_timer, + stats.tablet_reader_init_keys_param_timer_ns); + COUNTER_UPDATE(local_state->_tablet_reader_init_orderby_keys_param_timer, + stats.tablet_reader_init_orderby_keys_param_timer_ns); + COUNTER_UPDATE(local_state->_tablet_reader_init_conditions_param_timer, + stats.tablet_reader_init_conditions_param_timer_ns); + COUNTER_UPDATE(local_state->_tablet_reader_init_delete_condition_param_timer, + stats.tablet_reader_init_delete_condition_param_timer_ns); + COUNTER_UPDATE(local_state->_block_reader_vcollect_iter_init_timer, + stats.block_reader_vcollect_iter_init_timer_ns); + COUNTER_UPDATE(local_state->_block_reader_rs_readers_init_timer, + stats.block_reader_rs_readers_init_timer_ns); + COUNTER_UPDATE(local_state->_block_reader_build_heap_init_timer, + stats.block_reader_build_heap_init_timer_ns); + + COUNTER_UPDATE(local_state->_rowset_reader_get_segment_iterators_timer, + stats.rowset_reader_get_segment_iterators_timer_ns); + COUNTER_UPDATE(local_state->_rowset_reader_create_iterators_timer, + stats.rowset_reader_create_iterators_timer_ns); + COUNTER_UPDATE(local_state->_rowset_reader_init_iterators_timer, + stats.rowset_reader_init_iterators_timer_ns); + COUNTER_UPDATE(local_state->_rowset_reader_load_segments_timer, + stats.rowset_reader_load_segments_timer_ns); + + COUNTER_UPDATE(local_state->_segment_iterator_init_timer, stats.segment_iterator_init_timer_ns); + COUNTER_UPDATE(local_state->_segment_iterator_init_return_column_iterators_timer, + stats.segment_iterator_init_return_column_iterators_timer_ns); + COUNTER_UPDATE(local_state->_segment_iterator_init_bitmap_index_iterators_timer, + stats.segment_iterator_init_bitmap_index_iterators_timer_ns); + COUNTER_UPDATE(local_state->_segment_iterator_init_inverted_index_iterators_timer, + 
stats.segment_iterator_init_inverted_index_iterators_timer_ns); + + COUNTER_UPDATE(local_state->_segment_create_column_readers_timer, + stats.segment_create_column_readers_timer_ns); + COUNTER_UPDATE(local_state->_segment_load_index_timer, stats.segment_load_index_timer_ns); + // Update metrics DorisMetrics::instance()->query_scan_bytes->increment( local_state->_read_compressed_counter->value()); diff --git a/be/src/vec/olap/block_reader.cpp b/be/src/vec/olap/block_reader.cpp index c46ff330f2bef1..07befd47d88781 100644 --- a/be/src/vec/olap/block_reader.cpp +++ b/be/src/vec/olap/block_reader.cpp @@ -108,40 +108,49 @@ Status BlockReader::_init_collect_iter(const ReaderParams& read_params) { return res; } // check if rowsets are noneoverlapping - _is_rowsets_overlapping = _rowsets_mono_asc_disjoint(read_params); - _vcollect_iter.init(this, _is_rowsets_overlapping, read_params.read_orderby_key, - read_params.read_orderby_key_reverse); + { + SCOPED_RAW_TIMER(&_stats.block_reader_vcollect_iter_init_timer_ns); + _is_rowsets_overlapping = _rowsets_mono_asc_disjoint(read_params); + _vcollect_iter.init(this, _is_rowsets_overlapping, read_params.read_orderby_key, + read_params.read_orderby_key_reverse); + } std::vector valid_rs_readers; RuntimeState* runtime_state = read_params.runtime_state; - for (int i = 0; i < read_params.rs_splits.size(); ++i) { - if (runtime_state != nullptr && runtime_state->is_cancelled()) { - return runtime_state->cancel_reason(); - } + { + SCOPED_RAW_TIMER(&_stats.block_reader_rs_readers_init_timer_ns); + for (int i = 0; i < read_params.rs_splits.size(); ++i) { + if (runtime_state != nullptr && runtime_state->is_cancelled()) { + return runtime_state->cancel_reason(); + } - auto& rs_split = read_params.rs_splits[i]; + auto& rs_split = read_params.rs_splits[i]; - // _vcollect_iter.topn_next() will init rs_reader by itself - if (!_vcollect_iter.use_topn_next()) { - RETURN_IF_ERROR(rs_split.rs_reader->init(&_reader_context, rs_split)); - } + // 
_vcollect_iter.topn_next() will init rs_reader by itself + if (!_vcollect_iter.use_topn_next()) { + RETURN_IF_ERROR(rs_split.rs_reader->init(&_reader_context, rs_split)); + } - Status res = _vcollect_iter.add_child(rs_split); - if (!res.ok() && !res.is()) { - LOG(WARNING) << "failed to add child to iterator, err=" << res; - return res; - } - if (res.ok()) { - valid_rs_readers.push_back(rs_split.rs_reader); + Status res = _vcollect_iter.add_child(rs_split); + if (!res.ok() && !res.is()) { + LOG(WARNING) << "failed to add child to iterator, err=" << res; + return res; + } + if (res.ok()) { + valid_rs_readers.push_back(rs_split.rs_reader); + } } } - RETURN_IF_ERROR(_vcollect_iter.build_heap(valid_rs_readers)); - // _vcollect_iter.topn_next() can not use current_row - if (!_vcollect_iter.use_topn_next()) { - auto status = _vcollect_iter.current_row(&_next_row); - _eof = status.is(); + { + SCOPED_RAW_TIMER(&_stats.block_reader_build_heap_init_timer_ns); + RETURN_IF_ERROR(_vcollect_iter.build_heap(valid_rs_readers)); + // _vcollect_iter.topn_next() can not use current_row + if (!_vcollect_iter.use_topn_next()) { + auto status = _vcollect_iter.current_row(&_next_row); + _eof = status.is(); + } } return Status::OK(); diff --git a/be/src/vec/olap/vgeneric_iterators.cpp b/be/src/vec/olap/vgeneric_iterators.cpp index d8a073fc11a508..fe37abd08fa7bb 100644 --- a/be/src/vec/olap/vgeneric_iterators.cpp +++ b/be/src/vec/olap/vgeneric_iterators.cpp @@ -50,9 +50,8 @@ Status VStatisticsIterator::init(const StorageReadOptions& opts) { auto cid = _schema.column_id(i); auto unique_id = _schema.column(cid)->unique_id(); if (_column_iterators_map.count(unique_id) < 1) { - RETURN_IF_ERROR(_segment->new_column_iterator(opts.tablet_schema->column(cid), - &_column_iterators_map[unique_id], - nullptr)); + RETURN_IF_ERROR(_segment->new_column_iterator( + opts.tablet_schema->column(cid), &_column_iterators_map[unique_id], &opts)); } 
_column_iterators.push_back(_column_iterators_map[unique_id].get()); } From 8b36f79ce779d960fbe00dd48fcff1a83e7016d9 Mon Sep 17 00:00:00 2001 From: Dongyang Li Date: Fri, 20 Dec 2024 14:20:23 +0800 Subject: [PATCH 27/82] [ci](perf) add back required of performance check (#45694) --- .asf.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.asf.yaml b/.asf.yaml index e3d516b35c19a5..7a7d845e4c9bb0 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -63,6 +63,7 @@ github: - COMPILE (DORIS_COMPILE) - Need_2_Approval - Cloud UT (Doris Cloud UT) + - performance (Doris Performance) required_pull_request_reviews: dismiss_stale_reviews: true From db3aff97c7899fa7eca54291e27fd9f33d016274 Mon Sep 17 00:00:00 2001 From: starocean999 Date: Fri, 20 Dec 2024 14:26:01 +0800 Subject: [PATCH 28/82] [fix](nereids)use equals instead of == to compare String (#45628) when comparing column name, we should use equals method instead of '==' to compare the content --- .../org/apache/doris/nereids/rules/analysis/BindRelation.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java index 583244f0902896..d494f90c9cb804 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindRelation.java @@ -257,7 +257,7 @@ private LogicalPlan preAggForRandomDistribution(LogicalOlapScan olapScan) { SlotReference slot = SlotReference.fromColumn(olapTable, col, col.getName(), olapScan.qualified()); ExprId exprId = slot.getExprId(); for (Slot childSlot : childOutputSlots) { - if (childSlot instanceof SlotReference && ((SlotReference) childSlot).getName() == col.getName()) { + if (childSlot instanceof SlotReference && ((SlotReference) childSlot).getName().equals(col.getName())) { exprId = childSlot.getExprId(); slot = 
slot.withExprId(exprId); break; From e81ecb5fd475de0e02dd7d5c16cbf4db040f61d9 Mon Sep 17 00:00:00 2001 From: minghong Date: Fri, 20 Dec 2024 15:08:02 +0800 Subject: [PATCH 29/82] [fix](nereids) support one phase DeferMaterializeTopN (#45693) ### What problem does this PR solve? make DeferMaterializeTopN support one phase sort. --- ...izeTopNToPhysicalDeferMaterializeTopN.java | 9 +++- .../defer_materialize_topn/one_phase.out | 7 +++ .../cte/test_cte_name_reuse.groovy | 2 +- .../defer_materialize_topn/one_phase.groovy | 50 +++++++++++++++++++ 4 files changed, 65 insertions(+), 3 deletions(-) create mode 100644 regression-test/data/nereids_rules_p0/defer_materialize_topn/one_phase.out create mode 100644 regression-test/suites/nereids_rules_p0/defer_materialize_topn/one_phase.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalDeferMaterializeTopNToPhysicalDeferMaterializeTopN.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalDeferMaterializeTopNToPhysicalDeferMaterializeTopN.java index 9ad6b73d1c85cf..2799ca30147bc7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalDeferMaterializeTopNToPhysicalDeferMaterializeTopN.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/implementation/LogicalDeferMaterializeTopNToPhysicalDeferMaterializeTopN.java @@ -20,6 +20,7 @@ import org.apache.doris.nereids.rules.Rule; import org.apache.doris.nereids.rules.RuleType; import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.SortPhase; import org.apache.doris.nereids.trees.plans.logical.LogicalDeferMaterializeTopN; import org.apache.doris.nereids.trees.plans.physical.PhysicalDeferMaterializeTopN; import org.apache.doris.nereids.trees.plans.physical.PhysicalTopN; @@ -38,8 +39,12 @@ public Rule build() { .build() .transform(topN.getLogicalTopN(), ctx.cascadesContext) .get(0); - return wrap(physicalTopN, topN, 
wrap((PhysicalTopN) physicalTopN.child(), topN, - ((PhysicalTopN) physicalTopN.child()).child())); + if (physicalTopN.getSortPhase() == SortPhase.MERGE_SORT) { + return wrap(physicalTopN, topN, wrap((PhysicalTopN) physicalTopN.child(), topN, + ((PhysicalTopN) physicalTopN.child()).child())); + } else { + return wrap(physicalTopN, topN, physicalTopN.child()); + } }).toRule(RuleType.LOGICAL_DEFER_MATERIALIZE_TOP_N_TO_PHYSICAL_DEFER_MATERIALIZE_TOP_N_RULE); } diff --git a/regression-test/data/nereids_rules_p0/defer_materialize_topn/one_phase.out b/regression-test/data/nereids_rules_p0/defer_materialize_topn/one_phase.out new file mode 100644 index 00000000000000..cb3f8c9b7ccdf4 --- /dev/null +++ b/regression-test/data/nereids_rules_p0/defer_materialize_topn/one_phase.out @@ -0,0 +1,7 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !1 -- +11113 + +-- !2 -- +11113 + diff --git a/regression-test/suites/nereids_rules_p0/cte/test_cte_name_reuse.groovy b/regression-test/suites/nereids_rules_p0/cte/test_cte_name_reuse.groovy index 5d472716f00b73..0f943db5b311d7 100644 --- a/regression-test/suites/nereids_rules_p0/cte/test_cte_name_reuse.groovy +++ b/regression-test/suites/nereids_rules_p0/cte/test_cte_name_reuse.groovy @@ -14,7 +14,7 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. 
-suite("test_cte_name_reuse)") { +suite("test_cte_name_reuse") { sql "SET enable_nereids_planner=true" sql "SET enable_pipeline_engine=true" sql "SET enable_fallback_to_original_planner=false" diff --git a/regression-test/suites/nereids_rules_p0/defer_materialize_topn/one_phase.groovy b/regression-test/suites/nereids_rules_p0/defer_materialize_topn/one_phase.groovy new file mode 100644 index 00000000000000..c7b106eff2a289 --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/defer_materialize_topn/one_phase.groovy @@ -0,0 +1,50 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+suite("one_phase") { + sql """ + drop table if exists users; + + CREATE TABLE `users` ( + `UserID` bigint NULL + ) ENGINE=OLAP + DUPLICATE KEY(`UserID`) + DISTRIBUTED BY HASH(`UserID`) BUCKETS 48 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "min_load_replica_num" = "-1", + "is_being_synced" = "false", + "storage_medium" = "hdd", + "storage_format" = "V2", + "inverted_index_storage_format" = "V2", + "light_schema_change" = "true", + "disable_auto_compaction" = "false", + "enable_single_replica_compaction" = "false", + "group_commit_interval_ms" = "10000", + "group_commit_data_bytes" = "134217728" + ); + + insert into users values (11111),(11112),(11113); + + """ + + sql "set sort_phase_num=1;" + qt_1 "select userid from users order by userid limit 2, 109000000;" + + sql "set sort_phase_num=2;" + qt_2 "select userid from users order by userid limit 2, 109000000;" + +} \ No newline at end of file From c87180a3e430df3f43cf734e88b04ab1aa55b3cd Mon Sep 17 00:00:00 2001 From: hui lai Date: Fri, 20 Dec 2024 15:27:46 +0800 Subject: [PATCH 30/82] [fix](cloud) fix abort transaction in runningTxns list when show routine load (#45629) There are some abort transaction in running transaction list when execute `Show routine load`: ``` statistic: {"receivedBytes":690347731,"runningTxns":[84983868539904,85435786230784,85005343163392,85437129268225,85454778056704,85435116123136,85025611246592,85437060583424,85434241746944,85415318736896,85465045433344,84985143969794,85004337471488,85415183878144,85415385197568,85424109151232,85004808868865,85005412474880,85025545732096,85414981022720,84984677082113,85436924459012],"errorRows":0,"committedTaskNum":211,"loadedRows":3612290,"loadRowsRate":195026,"abortedTaskNum":1,"errorRowsAfterResumed":0,"totalRows":3612290,"unselectedRows":0,"receivedBytesRate":37271770,"taskExecuteTimeMs":18522} ``` When abort transaction to meta service, transaction info in `abortTxnResponse `would be default value when abort 
transaction failed.Then this logic will invalid for transaction id is default value: ``` this.jobStatistic.runningTxnIds.remove(txnState.getTransactionId()); ``` --- .../doris/cloud/transaction/CloudGlobalTransactionMgr.java | 1 + 1 file changed, 1 insertion(+) diff --git a/fe/fe-core/src/main/java/org/apache/doris/cloud/transaction/CloudGlobalTransactionMgr.java b/fe/fe-core/src/main/java/org/apache/doris/cloud/transaction/CloudGlobalTransactionMgr.java index 11a3f05ead70c4..b9425245f421b2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/cloud/transaction/CloudGlobalTransactionMgr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/cloud/transaction/CloudGlobalTransactionMgr.java @@ -1288,6 +1288,7 @@ private void handleAfterAbort(AbortTxnResponse abortTxnResponse, TxnCommitAttach if (txnCommitAttachment != null && txnCommitAttachment instanceof RLTaskTxnCommitAttachment) { RLTaskTxnCommitAttachment rlTaskTxnCommitAttachment = (RLTaskTxnCommitAttachment) txnCommitAttachment; callbackId = rlTaskTxnCommitAttachment.getJobId(); + txnState.setTransactionId(transactionId); } cb = callbackFactory.getCallback(callbackId); From fef962aea6cc38c59e548faa87f8c70ca5552873 Mon Sep 17 00:00:00 2001 From: hui lai Date: Fri, 20 Dec 2024 15:29:21 +0800 Subject: [PATCH 31/82] [fix](routine load) make routine load delay eof schedule work (#45528) --- .../load/routineload/KafkaRoutineLoadJob.java | 2 +- .../doris/load/routineload/KafkaTaskInfo.java | 9 +- .../load/routineload/RoutineLoadTaskInfo.java | 9 +- .../routineload/KafkaRoutineLoadJobTest.java | 2 +- .../RoutineLoadTaskSchedulerTest.java | 2 +- .../transaction/GlobalTransactionMgrTest.java | 4 +- .../load_p0/routine_load/data/test_eof.csv | 1 + .../routine_load/test_routine_load_eof.groovy | 178 ++++++++++++++++++ 8 files changed, 195 insertions(+), 12 deletions(-) create mode 100644 regression-test/suites/load_p0/routine_load/data/test_eof.csv create mode 100644 
regression-test/suites/load_p0/routine_load/test_routine_load_eof.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaRoutineLoadJob.java b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaRoutineLoadJob.java index 6bdef3301a610e..d0843eb92044f0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaRoutineLoadJob.java +++ b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaRoutineLoadJob.java @@ -235,7 +235,7 @@ public void divideRoutineLoadJob(int currentConcurrentTaskNum) throws UserExcept } KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), id, maxBatchIntervalS * Config.routine_load_task_timeout_multiplier * 1000, - taskKafkaProgress, isMultiTable()); + taskKafkaProgress, isMultiTable(), -1, false); routineLoadTaskInfoList.add(kafkaTaskInfo); result.add(kafkaTaskInfo); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaTaskInfo.java b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaTaskInfo.java index f1578269529a12..e3292dc671f8b4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaTaskInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/KafkaTaskInfo.java @@ -49,16 +49,17 @@ public class KafkaTaskInfo extends RoutineLoadTaskInfo { private Map partitionIdToOffset; public KafkaTaskInfo(UUID id, long jobId, - long timeoutMs, Map partitionIdToOffset, boolean isMultiTable) { - super(id, jobId, timeoutMs, isMultiTable); + long timeoutMs, Map partitionIdToOffset, boolean isMultiTable, + long lastScheduledTime, boolean isEof) { + super(id, jobId, timeoutMs, isMultiTable, lastScheduledTime, isEof); this.partitionIdToOffset = partitionIdToOffset; } public KafkaTaskInfo(KafkaTaskInfo kafkaTaskInfo, Map partitionIdToOffset, boolean isMultiTable) { super(UUID.randomUUID(), kafkaTaskInfo.getJobId(), - kafkaTaskInfo.getTimeoutMs(), kafkaTaskInfo.getBeId(), isMultiTable); + 
kafkaTaskInfo.getTimeoutMs(), kafkaTaskInfo.getBeId(), isMultiTable, + kafkaTaskInfo.getLastScheduledTime(), kafkaTaskInfo.getIsEof()); this.partitionIdToOffset = partitionIdToOffset; - this.isEof = kafkaTaskInfo.getIsEof(); } public List getPartitions() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadTaskInfo.java b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadTaskInfo.java index 1ff825d97b9d17..5075311299d603 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadTaskInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/load/routineload/RoutineLoadTaskInfo.java @@ -79,17 +79,20 @@ public abstract class RoutineLoadTaskInfo { // so that user or other logic can know the status of the corresponding txn. protected TransactionStatus txnStatus = TransactionStatus.UNKNOWN; - public RoutineLoadTaskInfo(UUID id, long jobId, long timeoutMs, boolean isMultiTable) { + public RoutineLoadTaskInfo(UUID id, long jobId, long timeoutMs, boolean isMultiTable, + long lastScheduledTime, boolean isEof) { this.id = id; this.jobId = jobId; this.createTimeMs = System.currentTimeMillis(); this.timeoutMs = timeoutMs; this.isMultiTable = isMultiTable; + this.lastScheduledTime = lastScheduledTime; + this.isEof = isEof; } public RoutineLoadTaskInfo(UUID id, long jobId, long timeoutMs, long previousBeId, - boolean isMultiTable) { - this(id, jobId, timeoutMs, isMultiTable); + boolean isMultiTable, long lastScheduledTime, boolean isEof) { + this(id, jobId, timeoutMs, isMultiTable, lastScheduledTime, isEof); this.previousBeId = previousBeId; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/load/routineload/KafkaRoutineLoadJobTest.java b/fe/fe-core/src/test/java/org/apache/doris/load/routineload/KafkaRoutineLoadJobTest.java index 20cb626ff37055..63452a5d59ca11 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/load/routineload/KafkaRoutineLoadJobTest.java +++ 
b/fe/fe-core/src/test/java/org/apache/doris/load/routineload/KafkaRoutineLoadJobTest.java @@ -225,7 +225,7 @@ public void testProcessTimeOutTasks(@Injectable GlobalTransactionMgr globalTrans Map partitionIdsToOffset = Maps.newHashMap(); partitionIdsToOffset.put(100, 0L); KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(new UUID(1, 1), 1L, - maxBatchIntervalS * 2 * 1000, partitionIdsToOffset, false); + maxBatchIntervalS * 2 * 1000, partitionIdsToOffset, false, -1, false); kafkaTaskInfo.setExecuteStartTimeMs(System.currentTimeMillis() - maxBatchIntervalS * 2 * 1000 - 1); routineLoadTaskInfoList.add(kafkaTaskInfo); diff --git a/fe/fe-core/src/test/java/org/apache/doris/load/routineload/RoutineLoadTaskSchedulerTest.java b/fe/fe-core/src/test/java/org/apache/doris/load/routineload/RoutineLoadTaskSchedulerTest.java index 95c2423de71fa9..6e11fc5f71a5c5 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/load/routineload/RoutineLoadTaskSchedulerTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/load/routineload/RoutineLoadTaskSchedulerTest.java @@ -70,7 +70,7 @@ public void testRunOneCycle(@Injectable KafkaRoutineLoadJob kafkaRoutineLoadJob1 LinkedBlockingDeque routineLoadTaskInfoQueue = new LinkedBlockingDeque<>(); KafkaTaskInfo routineLoadTaskInfo1 = new KafkaTaskInfo(new UUID(1, 1), 1L, 20000, - partitionIdToOffset, false); + partitionIdToOffset, false, -1, false); routineLoadTaskInfoQueue.addFirst(routineLoadTaskInfo1); Map idToRoutineLoadTask = Maps.newHashMap(); diff --git a/fe/fe-core/src/test/java/org/apache/doris/transaction/GlobalTransactionMgrTest.java b/fe/fe-core/src/test/java/org/apache/doris/transaction/GlobalTransactionMgrTest.java index 420800a4bb3bd0..c4ec468c651856 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/transaction/GlobalTransactionMgrTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/transaction/GlobalTransactionMgrTest.java @@ -302,7 +302,7 @@ public void testCommitRoutineLoadTransaction(@Injectable TabletCommitInfo 
tablet Map partitionIdToOffset = Maps.newHashMap(); partitionIdToOffset.put(1, 0L); KafkaTaskInfo routineLoadTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), 1L, 20000, - partitionIdToOffset, false); + partitionIdToOffset, false, -1, false); Deencapsulation.setField(routineLoadTaskInfo, "txnId", 1L); routineLoadTaskInfoList.add(routineLoadTaskInfo); TransactionState transactionState = new TransactionState(1L, Lists.newArrayList(1L), 1L, "label", null, @@ -368,7 +368,7 @@ public void testCommitRoutineLoadTransactionWithErrorMax(@Injectable TabletCommi Map partitionIdToOffset = Maps.newHashMap(); partitionIdToOffset.put(1, 0L); KafkaTaskInfo routineLoadTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), 1L, 20000, - partitionIdToOffset, false); + partitionIdToOffset, false, -1, false); Deencapsulation.setField(routineLoadTaskInfo, "txnId", 1L); routineLoadTaskInfoList.add(routineLoadTaskInfo); TransactionState transactionState = new TransactionState(1L, Lists.newArrayList(1L), 1L, "label", null, diff --git a/regression-test/suites/load_p0/routine_load/data/test_eof.csv b/regression-test/suites/load_p0/routine_load/data/test_eof.csv new file mode 100644 index 00000000000000..bc857cabcfdb5c --- /dev/null +++ b/regression-test/suites/load_p0/routine_load/data/test_eof.csv @@ -0,0 +1 @@ +57|2023-08-19|TRUE|2|-25462|-74112029|6458082754318544493|-7910671781690629051|-15205.859375|-306870797.484914|759730669.0|-628556336.0|2023-07-10 18:39:10|2023-02-12|2023-01-27 07:26:06|y||Xi9nDVrLv8m6AwEpUxmtzFAuK48sQ|{"name": "John", "age": 25, "city": "New York"} \ No newline at end of file diff --git a/regression-test/suites/load_p0/routine_load/test_routine_load_eof.groovy b/regression-test/suites/load_p0/routine_load/test_routine_load_eof.groovy new file mode 100644 index 00000000000000..6eeb9a4e51c7b4 --- /dev/null +++ b/regression-test/suites/load_p0/routine_load/test_routine_load_eof.groovy @@ -0,0 +1,178 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more 
contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.apache.kafka.clients.admin.AdminClient +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.clients.producer.ProducerConfig + +suite("test_routine_load_eof","p0") { + def kafkaCsvTpoics = [ + "test_eof", + ] + + String enabled = context.config.otherConfigs.get("enableKafkaTest") + String kafka_port = context.config.otherConfigs.get("kafka_port") + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + def kafka_broker = "${externalEnvIp}:${kafka_port}" + + if (enabled != null && enabled.equalsIgnoreCase("true")) { + def thread = Thread.start { + // define kafka + def props = new Properties() + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "${kafka_broker}".toString()) + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer") + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer") + // Create kafka producer + def producer = new KafkaProducer<>(props) + + while(true) { + Thread.sleep(1000) + for (String kafkaCsvTopic in kafkaCsvTpoics) { + def txt = new 
File("""${context.file.parent}/data/${kafkaCsvTopic}.csv""").text + def lines = txt.readLines() + lines.each { line -> + logger.info("=====${line}========") + def record = new ProducerRecord<>(kafkaCsvTopic, null, line) + producer.send(record) + } + } + } + } + + sleep(2 * 1000) + + def jobName = "testEof" + def tableName = "test_routine_load_eof" + try { + sql """ + CREATE TABLE IF NOT EXISTS ${tableName} + ( + k00 INT NOT NULL, + k01 DATE NOT NULL, + k02 BOOLEAN NULL, + k03 TINYINT NULL, + k04 SMALLINT NULL, + k05 INT NULL, + k06 BIGINT NULL, + k07 LARGEINT NULL, + k08 FLOAT NULL, + k09 DOUBLE NULL, + k10 DECIMAL(9,1) NULL, + k11 DECIMALV3(9,1) NULL, + k12 DATETIME NULL, + k13 DATEV2 NULL, + k14 DATETIMEV2 NULL, + k15 CHAR NULL, + k16 VARCHAR NULL, + k17 STRING NULL, + k18 JSON NULL, + kd01 BOOLEAN NOT NULL DEFAULT "TRUE", + kd02 TINYINT NOT NULL DEFAULT "1", + kd03 SMALLINT NOT NULL DEFAULT "2", + kd04 INT NOT NULL DEFAULT "3", + kd05 BIGINT NOT NULL DEFAULT "4", + kd06 LARGEINT NOT NULL DEFAULT "5", + kd07 FLOAT NOT NULL DEFAULT "6.0", + kd08 DOUBLE NOT NULL DEFAULT "7.0", + kd09 DECIMAL NOT NULL DEFAULT "888888888", + kd10 DECIMALV3 NOT NULL DEFAULT "999999999", + kd11 DATE NOT NULL DEFAULT "2023-08-24", + kd12 DATETIME NOT NULL DEFAULT "2023-08-24 12:00:00", + kd13 DATEV2 NOT NULL DEFAULT "2023-08-24", + kd14 DATETIMEV2 NOT NULL DEFAULT "2023-08-24 12:00:00", + kd15 CHAR(255) NOT NULL DEFAULT "我能吞下玻璃而不伤身体", + kd16 VARCHAR(300) NOT NULL DEFAULT "我能吞下玻璃而不伤身体", + kd17 STRING NOT NULL DEFAULT "我能吞下玻璃而不伤身体", + kd18 JSON NULL, + + INDEX idx_inverted_k104 (`k05`) USING INVERTED, + INDEX idx_inverted_k110 (`k11`) USING INVERTED, + INDEX idx_inverted_k113 (`k13`) USING INVERTED, + INDEX idx_inverted_k114 (`k14`) USING INVERTED, + INDEX idx_inverted_k117 (`k17`) USING INVERTED PROPERTIES("parser" = "english"), + INDEX idx_ngrambf_k115 (`k15`) USING NGRAM_BF PROPERTIES("gram_size"="3", "bf_size"="256"), + INDEX idx_ngrambf_k116 (`k16`) USING NGRAM_BF 
PROPERTIES("gram_size"="3", "bf_size"="256"), + INDEX idx_ngrambf_k117 (`k17`) USING NGRAM_BF PROPERTIES("gram_size"="3", "bf_size"="256"), + + INDEX idx_bitmap_k104 (`k02`) USING BITMAP, + INDEX idx_bitmap_k110 (`kd01`) USING BITMAP + + ) + DUPLICATE KEY(k00) + PARTITION BY RANGE(k01) + ( + PARTITION p1 VALUES [('2023-08-01'), ('2023-08-11')), + PARTITION p2 VALUES [('2023-08-11'), ('2023-08-21')), + PARTITION p3 VALUES [('2023-08-21'), ('2023-09-01')) + ) + DISTRIBUTED BY HASH(k00) BUCKETS 32 + PROPERTIES ( + "bloom_filter_columns"="k05", + "replication_num" = "1" + ); + """ + sql "sync" + + sql """ + CREATE ROUTINE LOAD ${jobName} on ${tableName} + COLUMNS(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18), + COLUMNS TERMINATED BY "|" + PROPERTIES + ( + "max_batch_interval" = "5", + "max_batch_rows" = "300000", + "max_batch_size" = "209715200" + ) + FROM KAFKA + ( + "kafka_broker_list" = "${externalEnvIp}:${kafka_port}", + "kafka_topic" = "test_eof", + "property.kafka_default_offsets" = "OFFSET_BEGINNING" + ); + """ + sql "sync" + + def count = 0 + while (true) { + sleep(1000) + def res = sql "show routine load for ${jobName}" + def state = res[0][8].toString() + if (state != "RUNNING") { + count++ + if (count > 60) { + assertEquals(1, 2) + } + continue; + } + break; + } + sleep(60 * 1000) + def res = sql "show routine load for ${jobName}" + def statistic = res[0][14].toString() + def json = parseJson(res[0][14]) + log.info("routine load statistic: ${res[0][14].toString()}".toString()) + if (json.committedTaskNum > 20) { + assertEquals(1, 2) + } + } finally { + sql "stop routine load for ${jobName}" + sql "DROP TABLE IF EXISTS ${tableName}" + } + thread.interrupt() + } +} \ No newline at end of file From fc512ab97715f3882d848f62e577f03383976ffd Mon Sep 17 00:00:00 2001 From: zhangdong Date: Fri, 20 Dec 2024 15:30:03 +0800 Subject: [PATCH 32/82] [enhance](mtmv)Change the way to verify the existence of partition names when refreshing MTMV 
(#45290) ### What problem does this PR solve? Previously, when refreshing the materialized view according to the partition name, the existing partition of the materialized view was used to verify whether the partition name existed After the change, the partition that should be present after refreshing the materialized view is used for verification --- .../plans/commands/info/RefreshMTMVInfo.java | 39 +++++++++- .../test_refresh_partition_name_mtmv.groovy | 76 +++++++++++++++++++ 2 files changed, 114 insertions(+), 1 deletion(-) create mode 100644 regression-test/suites/mtmv_p0/test_refresh_partition_name_mtmv.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/RefreshMTMVInfo.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/RefreshMTMVInfo.java index bf483f87a152c3..94e9243306989a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/RefreshMTMVInfo.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/RefreshMTMVInfo.java @@ -17,23 +17,33 @@ package org.apache.doris.nereids.trees.plans.commands.info; +import org.apache.doris.analysis.AllPartitionDesc; +import org.apache.doris.analysis.SinglePartitionDesc; import org.apache.doris.catalog.Database; import org.apache.doris.catalog.Env; import org.apache.doris.catalog.MTMV; +import org.apache.doris.catalog.TableIf; import org.apache.doris.catalog.TableIf.TableType; import org.apache.doris.common.DdlException; import org.apache.doris.common.ErrorCode; import org.apache.doris.common.MetaNotFoundException; +import org.apache.doris.common.util.MetaLockUtils; +import org.apache.doris.mtmv.MTMVPartitionInfo.MTMVPartitionType; import org.apache.doris.mtmv.MTMVPartitionUtil; +import org.apache.doris.mtmv.MTMVRelatedTableIf; import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.util.Utils; 
import org.apache.doris.qe.ConnectContext; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; import org.apache.commons.collections.CollectionUtils; +import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.Set; /** * refresh mtmv info @@ -67,13 +77,40 @@ public void analyze(ConnectContext ctx) { Database db = Env.getCurrentInternalCatalog().getDbOrDdlException(mvName.getDb()); MTMV mtmv = (MTMV) db.getTableOrMetaException(mvName.getTbl(), TableType.MATERIALIZED_VIEW); if (!CollectionUtils.isEmpty(partitions)) { - MTMVPartitionUtil.getPartitionsIdsByNames(mtmv, partitions); + checkPartitionExist(mtmv); } } catch (org.apache.doris.common.AnalysisException | MetaNotFoundException | DdlException e) { throw new AnalysisException(e.getMessage()); } } + private void checkPartitionExist(MTMV mtmv) throws org.apache.doris.common.AnalysisException { + MTMVRelatedTableIf relatedTable = mtmv.getMvPartitionInfo().getRelatedTable(); + List tables = Lists.newArrayList(mtmv, relatedTable); + tables.sort(Comparator.comparing(TableIf::getId)); + MetaLockUtils.readLockTables(tables); + try { + if (mtmv.getMvPartitionInfo().getPartitionType().equals(MTMVPartitionType.SELF_MANAGE)) { + throw new AnalysisException( + "The partition method of this asynchronous materialized view " + + "does not support refreshing by partition"); + } + List partitionDescs = MTMVPartitionUtil.getPartitionDescsByRelatedTable( + mtmv.getTableProperty().getProperties(), mtmv.getMvPartitionInfo(), mtmv.getMvProperties()); + Set shouldExistPartitionNames = Sets.newHashSetWithExpectedSize(partitionDescs.size()); + partitionDescs.stream().forEach(desc -> { + shouldExistPartitionNames.add(((SinglePartitionDesc) desc).getPartitionName()); + }); + for (String partition : partitions) { + if (!shouldExistPartitionNames.contains(partition)) { + throw new org.apache.doris.common.AnalysisException("partition not exist: " + partition); + } + } + } 
finally { + MetaLockUtils.readUnlockTables(tables); + } + } + /** * getMvName * diff --git a/regression-test/suites/mtmv_p0/test_refresh_partition_name_mtmv.groovy b/regression-test/suites/mtmv_p0/test_refresh_partition_name_mtmv.groovy new file mode 100644 index 00000000000000..a18945339838d9 --- /dev/null +++ b/regression-test/suites/mtmv_p0/test_refresh_partition_name_mtmv.groovy @@ -0,0 +1,76 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_refresh_partition_name_mtmv","mtmv") { + String suiteName = "test_refresh_partition_name_mtmv" + String tableName = "${suiteName}_table" + String mvName = "${suiteName}_mv" + sql """drop table if exists `${tableName}`""" + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE TABLE ${tableName} + ( + k2 TINYINT, + k3 INT not null + ) + COMMENT "my first table" + PARTITION BY LIST(`k3`) + ( + PARTITION `p1` VALUES IN ('1'), + PARTITION `p2` VALUES IN ('2'), + PARTITION `p3` VALUES IN ('3') + ) + DISTRIBUTED BY HASH(k2) BUCKETS 2 + PROPERTIES ( + "replication_num" = "1" + ); + """ + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`k3`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ( + 'replication_num' = '1', + 'refresh_partition_num' = '2' + ) + AS + SELECT * from ${tableName}; + """ + + test { + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_4) + """ + exception "partition not exist" + } + + sql """ + alter table ${tableName} add PARTITION `p4` VALUES IN ('4') + """ + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_4) + """ + + waitingMTMVTaskFinishedByMvName(mvName) + + sql """drop table if exists `${tableName}`""" + sql """drop materialized view if exists ${mvName};""" +} From 9bde47c7ab39f3de18b1e47c08f39c2fd820e9eb Mon Sep 17 00:00:00 2001 From: KassieZ <139741991+KassieZ@users.noreply.github.com> Date: Fri, 20 Dec 2024 15:57:52 +0800 Subject: [PATCH 33/82] [docs](readme) Update README (#43283) ### What problem does this PR solve? Issue Number: close #xxx Related PR: #xxx Problem Summary: Update README about v3.X release ### Check List (For Committer) - Test - [ ] Regression test - [ ] Unit Test - [ ] Manual test (add detailed scripts or steps below) - [ ] No need to test or manual test. Explain why: - [ ] This is a refactor/code format and no logic has been changed. - [ ] Previous test can cover this change. 
- [ ] No colde files have been changed. - [ ] Other reason - Behavior changed: - [ ] No. - [ ] Yes. - Does this need documentation? - [ ] No. - [ ] Yes. - Release note None ### Check List (For Reviewer who merge this PR) - [ ] Confirm the release note - [ ] Confirm test cases - [ ] Confirm document - [ ] Add branch pick label --------- Co-authored-by: Jeffrey --- README.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 94f9f4b777f8f5..3d264ee13ed8ad 100644 --- a/README.md +++ b/README.md @@ -59,12 +59,9 @@ Apache Doris is an easy-to-use, high-performance and real-time analytical databa All this makes Apache Doris an ideal tool for scenarios including report analysis, ad-hoc query, unified data warehouse, and data lake query acceleration. On Apache Doris, users can build various applications, such as user behavior analysis, AB test platform, log retrieval analysis, user portrait analysis, and order analysis. -🎉 Version 2.1.4 released now. Check out the 🔗[Release Notes](https://doris.apache.org/docs/releasenotes/release-2.1.4) here. The 2.1 verison delivers exceptional performance with 100% higher out-of-the-box queries proven by TPC-DS 1TB tests, enhanced data lake analytics that are 4-6 times speedier than Trino and Spark, solid support for semi-structured data analysis with new Variant types and suite of analytical functions, asynchronous materialized views for query acceleration, optimized real-time writing at scale, and better workload management with stability and runtime SQL resource tracking. +🎉 Check out the 🔗[All releases](https://doris.apache.org/docs/releasenotes/all-release), where you'll find a chronological summary of Apache Doris versions released over the past year. - -🎉 Version 2.0.12 is now released ! This fully evolved and stable release is ready for all users to upgrade. Check out the 🔗[Release Notes](https://doris.apache.org/docs/2.0/releasenotes/release-2.0.12) here. 
- -👀 Have a look at the 🔗[Official Website](https://doris.apache.org/) for a comprehensive list of Apache Doris's core features, blogs and user cases. +👀 Explore the 🔗[Official Website](https://doris.apache.org/) to discover Apache Doris's core features, blogs, and user cases in detail. ## 📈 Usage Scenarios From 62a6360a9881743a501d7e5a74063abebadc14a8 Mon Sep 17 00:00:00 2001 From: lihangyu Date: Fri, 20 Dec 2024 16:33:15 +0800 Subject: [PATCH 34/82] [Optimize](Variant) optimize schema update performance (#45480) When update schema with high concurrency, updaing schemas cost is expensive. 1. update schema only when rows is not 0 2. copy_from is expensive, use copy constructor --- .../olap/rowset/segment_v2/segment_writer.cpp | 4 +++- be/src/olap/rowset_builder.cpp | 24 ++++++++++--------- be/src/olap/tablet_schema.cpp | 15 ++++++++++++ be/src/olap/tablet_schema.h | 3 +++ be/src/vec/common/schema_util.cpp | 5 ++-- 5 files changed, 36 insertions(+), 15 deletions(-) diff --git a/be/src/olap/rowset/segment_v2/segment_writer.cpp b/be/src/olap/rowset/segment_v2/segment_writer.cpp index fe465f98a2aad2..2457a44de39e10 100644 --- a/be/src/olap/rowset/segment_v2/segment_writer.cpp +++ b/be/src/olap/rowset/segment_v2/segment_writer.cpp @@ -363,7 +363,9 @@ Status SegmentWriter::append_block_with_variant_subcolumns(vectorized::Block& da continue; } if (_flush_schema == nullptr) { - _flush_schema = std::make_shared(*_tablet_schema); + _flush_schema = std::make_shared(); + // deep copy + _flush_schema->copy_from(*_tablet_schema); } auto column_ref = data.get_by_position(i).column; const vectorized::ColumnObject& object_column = assert_cast( diff --git a/be/src/olap/rowset_builder.cpp b/be/src/olap/rowset_builder.cpp index ec7463d5b9d75d..ccc006e1f040a6 100644 --- a/be/src/olap/rowset_builder.cpp +++ b/be/src/olap/rowset_builder.cpp @@ -346,21 +346,22 @@ Status RowsetBuilder::commit_txn() { SCOPED_TIMER(_commit_txn_timer); const RowsetWriterContext& rw_ctx = 
_rowset_writer->context(); - if (rw_ctx.tablet_schema->num_variant_columns() > 0) { + if (rw_ctx.tablet_schema->num_variant_columns() > 0 && _rowset->num_rows() > 0) { // Need to merge schema with `rw_ctx.merged_tablet_schema` in prior, // merged schema keeps the newest merged schema for the rowset, which is updated and merged // during flushing segments. if (rw_ctx.merged_tablet_schema != nullptr) { RETURN_IF_ERROR(tablet()->update_by_least_common_schema(rw_ctx.merged_tablet_schema)); + } else { + // We should merge rowset schema further, in case that the merged_tablet_schema maybe null + // when enable_memtable_on_sink_node is true, the merged_tablet_schema will not be passed to + // the destination backend. + // update tablet schema when meet variant columns, before commit_txn + // Eg. rowset schema: A(int), B(float), C(int), D(int) + // _tabelt->tablet_schema: A(bigint), B(double) + // => update_schema: A(bigint), B(double), C(int), D(int) + RETURN_IF_ERROR(tablet()->update_by_least_common_schema(rw_ctx.tablet_schema)); } - // We should merge rowset schema further, in case that the merged_tablet_schema maybe null - // when enable_memtable_on_sink_node is true, the merged_tablet_schema will not be passed to - // the destination backend. - // update tablet schema when meet variant columns, before commit_txn - // Eg. 
rowset schema: A(int), B(float), C(int), D(int) - // _tabelt->tablet_schema: A(bigint), B(double) - // => update_schema: A(bigint), B(double), C(int), D(int) - RETURN_IF_ERROR(tablet()->update_by_least_common_schema(rw_ctx.tablet_schema)); } // Transfer ownership of `PendingRowsetGuard` to `TxnManager` @@ -398,7 +399,6 @@ Status BaseRowsetBuilder::cancel() { void BaseRowsetBuilder::_build_current_tablet_schema(int64_t index_id, const OlapTableSchemaParam* table_schema_param, const TabletSchema& ori_tablet_schema) { - _tablet_schema->copy_from(ori_tablet_schema); // find the right index id int i = 0; auto indexes = table_schema_param->indexes(); @@ -407,11 +407,13 @@ void BaseRowsetBuilder::_build_current_tablet_schema(int64_t index_id, break; } } - if (!indexes.empty() && !indexes[i]->columns.empty() && indexes[i]->columns[0]->unique_id() >= 0) { + _tablet_schema->shawdow_copy_without_columns(ori_tablet_schema); _tablet_schema->build_current_tablet_schema(index_id, table_schema_param->version(), indexes[i], ori_tablet_schema); + } else { + _tablet_schema->copy_from(ori_tablet_schema); } if (_tablet_schema->schema_version() > ori_tablet_schema.schema_version()) { // After schema change, should include extracted column diff --git a/be/src/olap/tablet_schema.cpp b/be/src/olap/tablet_schema.cpp index 3ec5d22166477f..7b6b5f313c144e 100644 --- a/be/src/olap/tablet_schema.cpp +++ b/be/src/olap/tablet_schema.cpp @@ -1064,6 +1064,21 @@ void TabletSchema::copy_from(const TabletSchema& tablet_schema) { _table_id = tablet_schema.table_id(); } +void TabletSchema::shawdow_copy_without_columns(const TabletSchema& tablet_schema) { + *this = tablet_schema; + _field_path_to_index.clear(); + _field_name_to_index.clear(); + _field_id_to_index.clear(); + _num_columns = 0; + _num_variant_columns = 0; + _num_null_columns = 0; + _num_key_columns = 0; + _cols.clear(); + _vl_field_mem_size = 0; + // notice : do not ref columns + _column_cache_handlers.clear(); +} + void 
TabletSchema::update_index_info_from(const TabletSchema& tablet_schema) { for (auto& col : _cols) { if (col->unique_id() < 0) { diff --git a/be/src/olap/tablet_schema.h b/be/src/olap/tablet_schema.h index c813d6f0ef8722..3dfe055fbf4a89 100644 --- a/be/src/olap/tablet_schema.h +++ b/be/src/olap/tablet_schema.h @@ -330,6 +330,8 @@ class TabletSchema : public MetadataAdder { // Must make sure the row column is always the last column void add_row_column(); void copy_from(const TabletSchema& tablet_schema); + // lightweight copy, take care of lifecycle of TabletColumn + void shawdow_copy_without_columns(const TabletSchema& tablet_schema); void update_index_info_from(const TabletSchema& tablet_schema); std::string to_key() const; // get_metadata_size is only the memory of the TabletSchema itself, not include child objects. @@ -531,6 +533,7 @@ class TabletSchema : public MetadataAdder { private: friend bool operator==(const TabletSchema& a, const TabletSchema& b); friend bool operator!=(const TabletSchema& a, const TabletSchema& b); + TabletSchema(const TabletSchema&) = default; void clear_column_cache_handlers(); diff --git a/be/src/vec/common/schema_util.cpp b/be/src/vec/common/schema_util.cpp index 2b1c71c643d613..2b53fc2470290b 100644 --- a/be/src/vec/common/schema_util.cpp +++ b/be/src/vec/common/schema_util.cpp @@ -415,9 +415,8 @@ Status get_least_common_schema(const std::vector& schemas, // duplicated paths following the update_least_common_schema process. 
auto build_schema_without_extracted_columns = [&](const TabletSchemaSPtr& base_schema) { output_schema = std::make_shared(); - output_schema->copy_from(*base_schema); - // Merge columns from other schemas - output_schema->clear_columns(); + // not copy columns but only shadow copy other attributes + output_schema->shawdow_copy_without_columns(*base_schema); // Get all columns without extracted columns and collect variant col unique id for (const TabletColumnPtr& col : base_schema->columns()) { if (col->is_variant_type()) { From af11693f48f7406438471a79718b67b3f8851731 Mon Sep 17 00:00:00 2001 From: airborne12 Date: Fri, 20 Dec 2024 20:27:18 +0800 Subject: [PATCH 35/82] [opt](profile) add index page profile for io (#45675) --- be/src/olap/base_tablet.cpp | 6 +- be/src/olap/delete_bitmap_calculator.cpp | 5 +- be/src/olap/primary_key_index.cpp | 12 ++-- be/src/olap/primary_key_index.h | 12 ++-- .../segment_v2/bloom_filter_index_reader.cpp | 15 ++--- .../segment_v2/bloom_filter_index_reader.h | 12 ++-- .../olap/rowset/segment_v2/column_reader.cpp | 62 +++++++++++-------- be/src/olap/rowset/segment_v2/column_reader.h | 26 +++++--- .../segment_v2/indexed_column_reader.cpp | 11 ++-- .../rowset/segment_v2/indexed_column_reader.h | 4 +- .../rowset/segment_v2/ordinal_page_index.cpp | 16 +++-- .../rowset/segment_v2/ordinal_page_index.h | 5 +- be/src/olap/rowset/segment_v2/segment.cpp | 37 ++++++----- be/src/olap/rowset/segment_v2/segment.h | 12 ++-- .../rowset/segment_v2/segment_iterator.cpp | 2 +- .../olap/rowset/segment_v2/zone_map_index.cpp | 15 +++-- .../olap/rowset/segment_v2/zone_map_index.h | 6 +- be/test/olap/date_bloom_filter_test.cpp | 8 +-- be/test/olap/primary_key_index_test.cpp | 20 +++--- .../bloom_filter_index_reader_writer_test.cpp | 4 +- .../segment_v2/ordinal_page_index_test.cpp | 4 +- be/test/olap/segment_cache_test.cpp | 2 +- 22 files changed, 164 insertions(+), 132 deletions(-) diff --git a/be/src/olap/base_tablet.cpp b/be/src/olap/base_tablet.cpp index 
33275a2663b329..a4720f89d19be6 100644 --- a/be/src/olap/base_tablet.cpp +++ b/be/src/olap/base_tablet.cpp @@ -499,7 +499,7 @@ Status BaseTablet::lookup_row_key(const Slice& encoded_key, TabletSchema* latest for (auto id : picked_segments) { Status s = segments[id]->lookup_row_key(encoded_key, schema, with_seq_col, with_rowid, - &loc, encoded_seq_value, stats); + &loc, stats, encoded_seq_value); if (s.is()) { continue; } @@ -615,7 +615,7 @@ Status BaseTablet::calc_segment_delete_bitmap(RowsetSharedPtr rowset, vectorized::Block ordered_block = block.clone_empty(); uint32_t pos = 0; - RETURN_IF_ERROR(seg->load_pk_index_and_bf()); // We need index blocks to iterate + RETURN_IF_ERROR(seg->load_pk_index_and_bf(nullptr)); // We need index blocks to iterate const auto* pk_idx = seg->get_primary_key_index(); int total = pk_idx->num_rows(); uint32_t row_id = 0; @@ -629,7 +629,7 @@ Status BaseTablet::calc_segment_delete_bitmap(RowsetSharedPtr rowset, std::vector> segment_caches(specified_rowsets.size()); while (remaining > 0) { std::unique_ptr iter; - RETURN_IF_ERROR(pk_idx->new_iterator(&iter)); + RETURN_IF_ERROR(pk_idx->new_iterator(&iter, nullptr)); size_t num_to_read = std::min(batch_size, remaining); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( diff --git a/be/src/olap/delete_bitmap_calculator.cpp b/be/src/olap/delete_bitmap_calculator.cpp index 017e3cff3d0489..8ac05a1e393043 100644 --- a/be/src/olap/delete_bitmap_calculator.cpp +++ b/be/src/olap/delete_bitmap_calculator.cpp @@ -145,12 +145,11 @@ Status MergeIndexDeleteBitmapCalculator::init(RowsetId rowset_id, MergeIndexDeleteBitmapCalculatorContext::Comparator(seq_col_length, _rowid_length); _contexts.reserve(segments.size()); _heap = std::make_unique(_comparator); - for (auto& segment : segments) { - RETURN_IF_ERROR(segment->load_index()); + RETURN_IF_ERROR(segment->load_index(nullptr)); auto pk_idx = segment->get_primary_key_index(); std::unique_ptr index; - 
RETURN_IF_ERROR(pk_idx->new_iterator(&index)); + RETURN_IF_ERROR(pk_idx->new_iterator(&index, nullptr)); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( pk_idx->type_info()->type(), 1, 0); _contexts.emplace_back(std::move(index), index_type, segment->id(), pk_idx->num_rows()); diff --git a/be/src/olap/primary_key_index.cpp b/be/src/olap/primary_key_index.cpp index 5f7bedb01fc8de..00b72832ee60e0 100644 --- a/be/src/olap/primary_key_index.cpp +++ b/be/src/olap/primary_key_index.cpp @@ -95,27 +95,29 @@ Status PrimaryKeyIndexBuilder::finalize(segment_v2::PrimaryKeyIndexMetaPB* meta) } Status PrimaryKeyIndexReader::parse_index(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta) { + const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats) { // parse primary key index _index_reader.reset(new segment_v2::IndexedColumnReader(file_reader, meta.primary_key_index())); _index_reader->set_is_pk_index(true); RETURN_IF_ERROR(_index_reader->load(!config::disable_pk_storage_page_cache, false, - _pk_index_load_stats)); + pk_index_load_stats)); _index_parsed = true; return Status::OK(); } Status PrimaryKeyIndexReader::parse_bf(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta) { + const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats) { // parse bloom filter segment_v2::ColumnIndexMetaPB column_index_meta = meta.bloom_filter_index(); segment_v2::BloomFilterIndexReader bf_index_reader(std::move(file_reader), column_index_meta.bloom_filter_index()); RETURN_IF_ERROR(bf_index_reader.load(!config::disable_pk_storage_page_cache, false, - _pk_index_load_stats)); + pk_index_load_stats)); std::unique_ptr bf_iter; - RETURN_IF_ERROR(bf_index_reader.new_iterator(&bf_iter)); + RETURN_IF_ERROR(bf_index_reader.new_iterator(&bf_iter, pk_index_load_stats)); RETURN_IF_ERROR(bf_iter->read_bloom_filter(0, &_bf)); 
segment_v2::g_pk_total_bloom_filter_num << 1; segment_v2::g_pk_total_bloom_filter_total_bytes << _bf->size(); diff --git a/be/src/olap/primary_key_index.h b/be/src/olap/primary_key_index.h index dcbbc5f30625f4..f74d3e42030f2f 100644 --- a/be/src/olap/primary_key_index.h +++ b/be/src/olap/primary_key_index.h @@ -98,8 +98,7 @@ class PrimaryKeyIndexBuilder { class PrimaryKeyIndexReader { public: - PrimaryKeyIndexReader(OlapReaderStatistics* pk_index_load_stats = nullptr) - : _index_parsed(false), _bf_parsed(false), _pk_index_load_stats(pk_index_load_stats) {} + PrimaryKeyIndexReader() : _index_parsed(false), _bf_parsed(false) {} ~PrimaryKeyIndexReader() { segment_v2::g_pk_total_bloom_filter_num << -static_cast(_bf_num); @@ -109,12 +108,14 @@ class PrimaryKeyIndexReader { } Status parse_index(io::FileReaderSPtr file_reader, - const segment_v2::PrimaryKeyIndexMetaPB& meta); + const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats); - Status parse_bf(io::FileReaderSPtr file_reader, const segment_v2::PrimaryKeyIndexMetaPB& meta); + Status parse_bf(io::FileReaderSPtr file_reader, const segment_v2::PrimaryKeyIndexMetaPB& meta, + OlapReaderStatistics* pk_index_load_stats); Status new_iterator(std::unique_ptr* index_iterator, - OlapReaderStatistics* stats = nullptr) const { + OlapReaderStatistics* stats) const { DCHECK(_index_parsed); index_iterator->reset(new segment_v2::IndexedColumnIterator(_index_reader.get(), stats)); return Status::OK(); @@ -155,7 +156,6 @@ class PrimaryKeyIndexReader { std::unique_ptr _bf; size_t _bf_num = 0; uint64 _bf_bytes = 0; - OlapReaderStatistics* _pk_index_load_stats = nullptr; }; } // namespace doris diff --git a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp index 8c63c25d20acee..7c51f0a24c1b1d 100644 --- a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp +++ b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.cpp @@ 
-34,9 +34,8 @@ namespace segment_v2 { Status BloomFilterIndexReader::load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. - _index_load_stats = index_load_stats; - return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, index_load_stats); }); } @@ -45,20 +44,22 @@ int64_t BloomFilterIndexReader::get_metadata_size() const { (_bloom_filter_index_meta ? _bloom_filter_index_meta->ByteSizeLong() : 0); } -Status BloomFilterIndexReader::_load(bool use_page_cache, bool kept_in_memory) { +Status BloomFilterIndexReader::_load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { const IndexedColumnMetaPB& bf_index_meta = _bloom_filter_index_meta->bloom_filter(); _bloom_filter_reader.reset(new IndexedColumnReader(_file_reader, bf_index_meta)); - RETURN_IF_ERROR(_bloom_filter_reader->load(use_page_cache, kept_in_memory, _index_load_stats)); + RETURN_IF_ERROR(_bloom_filter_reader->load(use_page_cache, kept_in_memory, index_load_stats)); update_metadata_size(); return Status::OK(); } -Status BloomFilterIndexReader::new_iterator(std::unique_ptr* iterator) { +Status BloomFilterIndexReader::new_iterator(std::unique_ptr* iterator, + OlapReaderStatistics* index_load_stats) { DBUG_EXECUTE_IF("BloomFilterIndexReader::new_iterator.fail", { return Status::InternalError("new_iterator for bloom filter index failed"); }); - iterator->reset(new BloomFilterIndexIterator(this)); + iterator->reset(new BloomFilterIndexIterator(this, index_load_stats)); return Status::OK(); } diff --git a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h index fcb0239a2440fa..fb53af89c0fe92 100644 --- 
a/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h +++ b/be/src/olap/rowset/segment_v2/bloom_filter_index_reader.h @@ -48,17 +48,18 @@ class BloomFilterIndexReader : public MetadataAdder { } Status load(bool use_page_cache, bool kept_in_memory, - OlapReaderStatistics* _bf_index_load_stats = nullptr); + OlapReaderStatistics* bf_index_load_stats); BloomFilterAlgorithmPB algorithm() { return _bloom_filter_index_meta->algorithm(); } // create a new column iterator. - Status new_iterator(std::unique_ptr* iterator); + Status new_iterator(std::unique_ptr* iterator, + OlapReaderStatistics* index_load_stats); const TypeInfo* type_info() const { return _type_info; } private: - Status _load(bool use_page_cache, bool kept_in_memory); + Status _load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; @@ -70,13 +71,12 @@ class BloomFilterIndexReader : public MetadataAdder { const TypeInfo* _type_info = nullptr; std::unique_ptr _bloom_filter_index_meta = nullptr; std::unique_ptr _bloom_filter_reader; - OlapReaderStatistics* _index_load_stats = nullptr; }; class BloomFilterIndexIterator { public: - explicit BloomFilterIndexIterator(BloomFilterIndexReader* reader) - : _reader(reader), _bloom_filter_iter(reader->_bloom_filter_reader.get()) {} + explicit BloomFilterIndexIterator(BloomFilterIndexReader* reader, OlapReaderStatistics* stats) + : _reader(reader), _bloom_filter_iter(reader->_bloom_filter_reader.get(), stats) {} // Read bloom filter at the given ordinal into `bf`. 
Status read_bloom_filter(rowid_t ordinal, std::unique_ptr* bf); diff --git a/be/src/olap/rowset/segment_v2/column_reader.cpp b/be/src/olap/rowset/segment_v2/column_reader.cpp index 9d5328de869304..78c415530cd029 100644 --- a/be/src/olap/rowset/segment_v2/column_reader.cpp +++ b/be/src/olap/rowset/segment_v2/column_reader.cpp @@ -374,10 +374,12 @@ Status ColumnReader::read_page(const ColumnIteratorOptions& iter_opts, const Pag Status ColumnReader::get_row_ranges_by_zone_map( const AndBlockColumnPredicate* col_predicates, - const std::vector* delete_predicates, RowRanges* row_ranges) { + const std::vector* delete_predicates, RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { std::vector page_indexes; - RETURN_IF_ERROR(_get_filtered_pages(col_predicates, delete_predicates, &page_indexes)); - RETURN_IF_ERROR(_calculate_row_ranges(page_indexes, row_ranges)); + RETURN_IF_ERROR( + _get_filtered_pages(col_predicates, delete_predicates, &page_indexes, iter_opts)); + RETURN_IF_ERROR(_calculate_row_ranges(page_indexes, row_ranges, iter_opts)); return Status::OK(); } @@ -514,8 +516,8 @@ bool ColumnReader::_zone_map_match_condition(const ZoneMapPB& zone_map, Status ColumnReader::_get_filtered_pages( const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - std::vector* page_indexes) { - RETURN_IF_ERROR(_load_zone_map_index(_use_index_page_cache, _opts.kept_in_memory)); + std::vector* page_indexes, const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_zone_map_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); FieldType type = _type_info->type(); const std::vector& zone_maps = _zone_map_index->page_zone_maps(); @@ -553,9 +555,10 @@ Status ColumnReader::_get_filtered_pages( } Status ColumnReader::_calculate_row_ranges(const std::vector& page_indexes, - RowRanges* row_ranges) { + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { row_ranges->clear(); - 
RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); for (auto i : page_indexes) { ordinal_t page_first_id = _ordinal_index->get_first_ordinal(i); ordinal_t page_last_id = _ordinal_index->get_last_ordinal(i); @@ -566,12 +569,14 @@ Status ColumnReader::_calculate_row_ranges(const std::vector& page_ind } Status ColumnReader::get_row_ranges_by_bloom_filter(const AndBlockColumnPredicate* col_predicates, - RowRanges* row_ranges) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); - RETURN_IF_ERROR(_load_bloom_filter_index(_use_index_page_cache, _opts.kept_in_memory)); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); + RETURN_IF_ERROR( + _load_bloom_filter_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); RowRanges bf_row_ranges; std::unique_ptr bf_iter; - RETURN_IF_ERROR(_bloom_filter_index->new_iterator(&bf_iter)); + RETURN_IF_ERROR(_bloom_filter_index->new_iterator(&bf_iter, iter_opts.stats)); size_t range_size = row_ranges->range_size(); // get covered page ids std::set page_ids; @@ -598,16 +603,18 @@ Status ColumnReader::get_row_ranges_by_bloom_filter(const AndBlockColumnPredicat return Status::OK(); } -Status ColumnReader::_load_ordinal_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_ordinal_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (!_ordinal_index) { return Status::InternalError("ordinal_index not inited"); } - return _ordinal_index->load(use_page_cache, kept_in_memory); + return _ordinal_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } -Status ColumnReader::_load_zone_map_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_zone_map_index(bool 
use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (_zone_map_index != nullptr) { - return _zone_map_index->load(use_page_cache, kept_in_memory); + return _zone_map_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } return Status::OK(); } @@ -681,15 +688,17 @@ bool ColumnReader::has_bloom_filter_index(bool ngram) const { } } -Status ColumnReader::_load_bloom_filter_index(bool use_page_cache, bool kept_in_memory) { +Status ColumnReader::_load_bloom_filter_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts) { if (_bloom_filter_index != nullptr) { - return _bloom_filter_index->load(use_page_cache, kept_in_memory); + return _bloom_filter_index->load(use_page_cache, kept_in_memory, iter_opts.stats); } return Status::OK(); } -Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); +Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); *iter = _ordinal_index->begin(); if (!iter->valid()) { return Status::NotFound("Failed to seek to first rowid"); @@ -697,8 +706,9 @@ Status ColumnReader::seek_to_first(OrdinalPageIndexIterator* iter) { return Status::OK(); } -Status ColumnReader::seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter) { - RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory)); +Status ColumnReader::seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts) { + RETURN_IF_ERROR(_load_ordinal_index(_use_index_page_cache, _opts.kept_in_memory, iter_opts)); *iter = _ordinal_index->seek_at_or_before(ordinal); if (!iter->valid()) { return Status::NotFound("Failed to seek to ordinal {}, ", ordinal); @@ -1172,7 +1182,7 @@ Status 
FileColumnIterator::init(const ColumnIteratorOptions& opts) { FileColumnIterator::~FileColumnIterator() = default; Status FileColumnIterator::seek_to_first() { - RETURN_IF_ERROR(_reader->seek_to_first(&_page_iter)); + RETURN_IF_ERROR(_reader->seek_to_first(&_page_iter, _opts)); RETURN_IF_ERROR(_read_data_page(_page_iter)); _seek_to_pos_in_page(&_page, 0); @@ -1183,7 +1193,7 @@ Status FileColumnIterator::seek_to_first() { Status FileColumnIterator::seek_to_ordinal(ordinal_t ord) { // if current page contains this row, we don't need to seek if (!_page || !_page.contains(ord) || !_page_iter.valid()) { - RETURN_IF_ERROR(_reader->seek_at_or_before(ord, &_page_iter)); + RETURN_IF_ERROR(_reader->seek_at_or_before(ord, &_page_iter, _opts)); RETURN_IF_ERROR(_read_data_page(_page_iter)); } _seek_to_pos_in_page(&_page, ord - _page.first_ordinal); @@ -1431,8 +1441,8 @@ Status FileColumnIterator::get_row_ranges_by_zone_map( const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, RowRanges* row_ranges) { if (_reader->has_zone_map()) { - RETURN_IF_ERROR( - _reader->get_row_ranges_by_zone_map(col_predicates, delete_predicates, row_ranges)); + RETURN_IF_ERROR(_reader->get_row_ranges_by_zone_map(col_predicates, delete_predicates, + row_ranges, _opts)); } return Status::OK(); } @@ -1441,7 +1451,7 @@ Status FileColumnIterator::get_row_ranges_by_bloom_filter( const AndBlockColumnPredicate* col_predicates, RowRanges* row_ranges) { if ((col_predicates->can_do_bloom_filter(false) && _reader->has_bloom_filter_index(false)) || (col_predicates->can_do_bloom_filter(true) && _reader->has_bloom_filter_index(true))) { - RETURN_IF_ERROR(_reader->get_row_ranges_by_bloom_filter(col_predicates, row_ranges)); + RETURN_IF_ERROR(_reader->get_row_ranges_by_bloom_filter(col_predicates, row_ranges, _opts)); } return Status::OK(); } diff --git a/be/src/olap/rowset/segment_v2/column_reader.h b/be/src/olap/rowset/segment_v2/column_reader.h index d72d802f97769b..7e32b3a09b34da 
100644 --- a/be/src/olap/rowset/segment_v2/column_reader.h +++ b/be/src/olap/rowset/segment_v2/column_reader.h @@ -148,8 +148,9 @@ class ColumnReader : public MetadataAdder { std::unique_ptr* iterator); // Seek to the first entry in the column. - Status seek_to_first(OrdinalPageIndexIterator* iter); - Status seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter); + Status seek_to_first(OrdinalPageIndexIterator* iter, const ColumnIteratorOptions& iter_opts); + Status seek_at_or_before(ordinal_t ordinal, OrdinalPageIndexIterator* iter, + const ColumnIteratorOptions& iter_opts); // read a page from file into a page handle Status read_page(const ColumnIteratorOptions& iter_opts, const PagePointer& pp, @@ -175,11 +176,13 @@ class ColumnReader : public MetadataAdder { // - delete_condition is a delete predicate of one version Status get_row_ranges_by_zone_map(const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - RowRanges* row_ranges); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); // get row ranges with bloom filter index Status get_row_ranges_by_bloom_filter(const AndBlockColumnPredicate* col_predicates, - RowRanges* row_ranges); + RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); PagePointer get_dict_page_pointer() const { return _meta_dict_page; } @@ -219,13 +222,16 @@ class ColumnReader : public MetadataAdder { return Status::OK(); } - [[nodiscard]] Status _load_zone_map_index(bool use_page_cache, bool kept_in_memory); - [[nodiscard]] Status _load_ordinal_index(bool use_page_cache, bool kept_in_memory); + [[nodiscard]] Status _load_zone_map_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts); + [[nodiscard]] Status _load_ordinal_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts); [[nodiscard]] Status _load_bitmap_index(bool use_page_cache, bool kept_in_memory); [[nodiscard]] Status 
_load_inverted_index_index( std::shared_ptr index_file_reader, const TabletIndex* index_meta); - [[nodiscard]] Status _load_bloom_filter_index(bool use_page_cache, bool kept_in_memory); + [[nodiscard]] Status _load_bloom_filter_index(bool use_page_cache, bool kept_in_memory, + const ColumnIteratorOptions& iter_opts); bool _zone_map_match_condition(const ZoneMapPB& zone_map, WrapperField* min_value_container, WrapperField* max_value_container, @@ -239,9 +245,11 @@ class ColumnReader : public MetadataAdder { Status _get_filtered_pages(const AndBlockColumnPredicate* col_predicates, const std::vector* delete_predicates, - std::vector* page_indexes); + std::vector* page_indexes, + const ColumnIteratorOptions& iter_opts); - Status _calculate_row_ranges(const std::vector& page_indexes, RowRanges* row_ranges); + Status _calculate_row_ranges(const std::vector& page_indexes, RowRanges* row_ranges, + const ColumnIteratorOptions& iter_opts); int64_t get_metadata_size() const override; diff --git a/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp b/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp index da6beff5d8d6a2..3f582293ee4d7f 100644 --- a/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp +++ b/be/src/olap/rowset/segment_v2/indexed_column_reader.cpp @@ -66,7 +66,6 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats) { _use_page_cache = use_page_cache; _kept_in_memory = kept_in_memory; - _index_load_stats = index_load_stats; _type_info = get_scalar_type_info((FieldType)_meta.data_type()); if (_type_info == nullptr) { @@ -82,7 +81,7 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, } else { RETURN_IF_ERROR(load_index_page(_meta.ordinal_index_meta().root_page(), &_ordinal_index_page_handle, - _ordinal_index_reader.get())); + _ordinal_index_reader.get(), index_load_stats)); _has_index_page = true; } } @@ -93,7 +92,8 @@ Status IndexedColumnReader::load(bool 
use_page_cache, bool kept_in_memory, _sole_data_page = PagePointer(_meta.value_index_meta().root_page()); } else { RETURN_IF_ERROR(load_index_page(_meta.value_index_meta().root_page(), - &_value_index_page_handle, _value_index_reader.get())); + &_value_index_page_handle, _value_index_reader.get(), + index_load_stats)); _has_index_page = true; } } @@ -104,13 +104,14 @@ Status IndexedColumnReader::load(bool use_page_cache, bool kept_in_memory, } Status IndexedColumnReader::load_index_page(const PagePointerPB& pp, PageHandle* handle, - IndexPageReader* reader) { + IndexPageReader* reader, + OlapReaderStatistics* index_load_stats) { Slice body; PageFooterPB footer; BlockCompressionCodec* local_compress_codec; RETURN_IF_ERROR(get_block_compression_codec(_meta.compression(), &local_compress_codec)); RETURN_IF_ERROR(read_page(PagePointer(pp), handle, &body, &footer, INDEX_PAGE, - local_compress_codec, false, _index_load_stats)); + local_compress_codec, false, index_load_stats)); RETURN_IF_ERROR(reader->parse(body, footer.index_page_footer())); _mem_size += body.get_size(); return Status::OK(); diff --git a/be/src/olap/rowset/segment_v2/indexed_column_reader.h b/be/src/olap/rowset/segment_v2/indexed_column_reader.h index c9640c0007c153..6e62feaafdcdd1 100644 --- a/be/src/olap/rowset/segment_v2/indexed_column_reader.h +++ b/be/src/olap/rowset/segment_v2/indexed_column_reader.h @@ -76,7 +76,8 @@ class IndexedColumnReader : public MetadataAdder { void set_is_pk_index(bool is_pk) { _is_pk_index = is_pk; } private: - Status load_index_page(const PagePointerPB& pp, PageHandle* handle, IndexPageReader* reader); + Status load_index_page(const PagePointerPB& pp, PageHandle* handle, IndexPageReader* reader, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; @@ -103,7 +104,6 @@ class IndexedColumnReader : public MetadataAdder { const KeyCoder* _value_key_coder = nullptr; uint64_t _mem_size = 0; bool _is_pk_index = false; - OlapReaderStatistics* 
_index_load_stats = nullptr; }; class IndexedColumnIterator { diff --git a/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp b/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp index 9ee82bacdd73d2..4995e779892646 100644 --- a/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp +++ b/be/src/olap/rowset/segment_v2/ordinal_page_index.cpp @@ -69,15 +69,17 @@ Status OrdinalIndexWriter::finish(io::FileWriter* file_writer, ColumnIndexMetaPB return Status::OK(); } -Status OrdinalIndexReader::load(bool use_page_cache, bool kept_in_memory) { +Status OrdinalIndexReader::load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. - return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory, std::move(_meta_pb)); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, std::move(_meta_pb), index_load_stats); }); } Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr index_meta) { + std::unique_ptr index_meta, + OlapReaderStatistics* stats) { if (index_meta->root_page().is_root_data_page()) { // only one data page, no index page _num_pages = 1; @@ -88,6 +90,7 @@ Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, } // need to read index page OlapReaderStatistics tmp_stats; + OlapReaderStatistics* stats_ptr = stats != nullptr ? 
stats : &tmp_stats; PageReadOptions opts { .use_page_cache = use_page_cache, .kept_in_memory = kept_in_memory, @@ -96,8 +99,9 @@ Status OrdinalIndexReader::_load(bool use_page_cache, bool kept_in_memory, .page_pointer = PagePointer(index_meta->root_page().root_page()), // ordinal index page uses NO_COMPRESSION right now .codec = nullptr, - .stats = &tmp_stats, - .io_ctx = io::IOContext {.is_index_data = true}, + .stats = stats_ptr, + .io_ctx = io::IOContext {.is_index_data = true, + .file_cache_stats = &stats_ptr->file_cache_stats}, }; // read index page diff --git a/be/src/olap/rowset/segment_v2/ordinal_page_index.h b/be/src/olap/rowset/segment_v2/ordinal_page_index.h index 1d74cf989520aa..df60edb12d1481 100644 --- a/be/src/olap/rowset/segment_v2/ordinal_page_index.h +++ b/be/src/olap/rowset/segment_v2/ordinal_page_index.h @@ -75,7 +75,7 @@ class OrdinalIndexReader : public MetadataAdder { virtual ~OrdinalIndexReader(); // load and parse the index page into memory - Status load(bool use_page_cache, bool kept_in_memory); + Status load(bool use_page_cache, bool kept_in_memory, OlapReaderStatistics* index_load_stats); // the returned iter points to the largest element which is less than `ordinal`, // or points to the first element if all elements are greater than `ordinal`, @@ -94,7 +94,8 @@ class OrdinalIndexReader : public MetadataAdder { private: Status _load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr index_meta); + std::unique_ptr index_meta, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; diff --git a/be/src/olap/rowset/segment_v2/segment.cpp b/be/src/olap/rowset/segment_v2/segment.cpp index d55d84901c2e66..b5ab3f0e873549 100644 --- a/be/src/olap/rowset/segment_v2/segment.cpp +++ b/be/src/olap/rowset/segment_v2/segment.cpp @@ -290,7 +290,7 @@ Status Segment::new_iterator(SchemaSPtr schema, const StorageReadOptions& read_o { SCOPED_RAW_TIMER(&read_options.stats->segment_load_index_timer_ns); - 
RETURN_IF_ERROR(load_index()); + RETURN_IF_ERROR(load_index(read_options.stats)); } if (read_options.delete_condition_predicates->num_of_column_predicate() == 0 && @@ -475,7 +475,7 @@ Status Segment::_parse_footer(SegmentFooterPB* footer) { return Status::OK(); } -Status Segment::_load_pk_bloom_filter() { +Status Segment::_load_pk_bloom_filter(OlapReaderStatistics* stats) { #ifdef BE_TEST if (_pk_index_meta == nullptr) { // for BE UT "segment_cache_test" @@ -490,30 +490,30 @@ Status Segment::_load_pk_bloom_filter() { DCHECK(_pk_index_meta != nullptr); DCHECK(_pk_index_reader != nullptr); - return _load_pk_bf_once.call([this] { - RETURN_IF_ERROR(_pk_index_reader->parse_bf(_file_reader, *_pk_index_meta)); + return _load_pk_bf_once.call([this, stats] { + RETURN_IF_ERROR(_pk_index_reader->parse_bf(_file_reader, *_pk_index_meta, stats)); // _meta_mem_usage += _pk_index_reader->get_bf_memory_size(); return Status::OK(); }); } Status Segment::load_pk_index_and_bf(OlapReaderStatistics* index_load_stats) { - _pk_index_load_stats = index_load_stats; - RETURN_IF_ERROR(load_index()); - RETURN_IF_ERROR(_load_pk_bloom_filter()); + RETURN_IF_ERROR(load_index(index_load_stats)); + RETURN_IF_ERROR(_load_pk_bloom_filter(index_load_stats)); return Status::OK(); } -Status Segment::load_index() { - return _load_index_once.call([this] { +Status Segment::load_index(OlapReaderStatistics* stats) { + return _load_index_once.call([this, stats] { if (_tablet_schema->keys_type() == UNIQUE_KEYS && _pk_index_meta != nullptr) { - _pk_index_reader = std::make_unique(_pk_index_load_stats); - RETURN_IF_ERROR(_pk_index_reader->parse_index(_file_reader, *_pk_index_meta)); + _pk_index_reader = std::make_unique(); + RETURN_IF_ERROR(_pk_index_reader->parse_index(_file_reader, *_pk_index_meta, stats)); // _meta_mem_usage += _pk_index_reader->get_memory_size(); return Status::OK(); } else { // read and parse short key index page OlapReaderStatistics tmp_stats; + OlapReaderStatistics* stats_ptr = stats != 
nullptr ? stats : &tmp_stats; PageReadOptions opts { .use_page_cache = true, .type = INDEX_PAGE, @@ -522,7 +522,8 @@ Status Segment::load_index() { // short key index page uses NO_COMPRESSION for now .codec = nullptr, .stats = &tmp_stats, - .io_ctx = io::IOContext {.is_index_data = true}, + .io_ctx = io::IOContext {.is_index_data = true, + .file_cache_stats = &stats_ptr->file_cache_stats}, }; Slice body; PageFooterPB footer; @@ -970,8 +971,8 @@ Status Segment::new_inverted_index_iterator(const TabletColumn& tablet_column, Status Segment::lookup_row_key(const Slice& key, const TabletSchema* latest_schema, bool with_seq_col, bool with_rowid, RowLocation* row_location, - std::string* encoded_seq_value, OlapReaderStatistics* stats) { - RETURN_IF_ERROR(load_pk_index_and_bf()); + OlapReaderStatistics* stats, std::string* encoded_seq_value) { + RETURN_IF_ERROR(load_pk_index_and_bf(stats)); bool has_seq_col = latest_schema->has_sequence_col(); bool has_rowid = !latest_schema->cluster_key_uids().empty(); size_t seq_col_length = 0; @@ -1071,9 +1072,10 @@ Status Segment::lookup_row_key(const Slice& key, const TabletSchema* latest_sche } Status Segment::read_key_by_rowid(uint32_t row_id, std::string* key) { - RETURN_IF_ERROR(load_pk_index_and_bf()); + OlapReaderStatistics* null_stat = nullptr; + RETURN_IF_ERROR(load_pk_index_and_bf(null_stat)); std::unique_ptr iter; - RETURN_IF_ERROR(_pk_index_reader->new_iterator(&iter)); + RETURN_IF_ERROR(_pk_index_reader->new_iterator(&iter, null_stat)); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( _pk_index_reader->type_info()->type(), 1, 0); @@ -1129,7 +1131,8 @@ Status Segment::seek_and_read_by_rowid(const TabletSchema& schema, SlotDescripto .use_page_cache = !config::disable_storage_page_cache, .file_reader = file_reader().get(), .stats = &stats, - .io_ctx = io::IOContext {.reader_type = ReaderType::READER_QUERY}, + .io_ctx = io::IOContext {.reader_type = ReaderType::READER_QUERY, + .file_cache_stats = 
&stats.file_cache_stats}, }; std::vector single_row_loc {row_id}; if (!slot->column_paths().empty()) { diff --git a/be/src/olap/rowset/segment_v2/segment.h b/be/src/olap/rowset/segment_v2/segment.h index ca2fee0e77aa82..441ae3e85e9b3f 100644 --- a/be/src/olap/rowset/segment_v2/segment.h +++ b/be/src/olap/rowset/segment_v2/segment.h @@ -134,9 +134,8 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd } Status lookup_row_key(const Slice& key, const TabletSchema* latest_schema, bool with_seq_col, - bool with_rowid, RowLocation* row_location, - std::string* encoded_seq_value = nullptr, - OlapReaderStatistics* stats = nullptr); + bool with_rowid, RowLocation* row_location, OlapReaderStatistics* stats, + std::string* encoded_seq_value = nullptr); Status read_key_by_rowid(uint32_t row_id, std::string* key); @@ -144,9 +143,9 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd vectorized::MutableColumnPtr& result, OlapReaderStatistics& stats, std::unique_ptr& iterator_hint); - Status load_index(); + Status load_index(OlapReaderStatistics* stats); - Status load_pk_index_and_bf(OlapReaderStatistics* index_load_stats = nullptr); + Status load_pk_index_and_bf(OlapReaderStatistics* stats); void update_healthy_status(Status new_status) { _healthy_status.update(new_status); } // The segment is loaded into SegmentCache and then will load indices, if there are something wrong @@ -227,7 +226,7 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd Status _open(); Status _parse_footer(SegmentFooterPB* footer); Status _create_column_readers(const SegmentFooterPB& footer); - Status _load_pk_bloom_filter(); + Status _load_pk_bloom_filter(OlapReaderStatistics* stats); ColumnReader* _get_column_reader(const TabletColumn& col); // Get Iterator which will read variant root column and extract with paths and types info @@ -305,7 +304,6 @@ class Segment : public std::enable_shared_from_this, public MetadataAdd 
InvertedIndexFileInfo _idx_file_info; int _be_exec_version = BeExecVersionManager::get_newest_version(); - OlapReaderStatistics* _pk_index_load_stats = nullptr; }; } // namespace segment_v2 diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp index 0c54eaa2d6cbaa..5f50ffeea2d8f0 100644 --- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp +++ b/be/src/olap/rowset/segment_v2/segment_iterator.cpp @@ -1181,7 +1181,7 @@ Status SegmentIterator::_lookup_ordinal_from_pk_index(const RowCursor& key, bool bool exact_match = false; std::unique_ptr index_iterator; - RETURN_IF_ERROR(pk_index_reader->new_iterator(&index_iterator)); + RETURN_IF_ERROR(pk_index_reader->new_iterator(&index_iterator, _opts.stats)); Status status = index_iterator->seek_at_or_after(&index_key, &exact_match); if (UNLIKELY(!status.ok())) { diff --git a/be/src/olap/rowset/segment_v2/zone_map_index.cpp b/be/src/olap/rowset/segment_v2/zone_map_index.cpp index c2139ff0899090..9249c82aedfdc3 100644 --- a/be/src/olap/rowset/segment_v2/zone_map_index.cpp +++ b/be/src/olap/rowset/segment_v2/zone_map_index.cpp @@ -140,18 +140,21 @@ Status TypedZoneMapIndexWriter::finish(io::FileWriter* file_writer, return writer.finish(meta->mutable_page_zone_maps()); } -Status ZoneMapIndexReader::load(bool use_page_cache, bool kept_in_memory) { +Status ZoneMapIndexReader::load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats) { // TODO yyq: implement a new once flag to avoid status construct. 
- return _load_once.call([this, use_page_cache, kept_in_memory] { - return _load(use_page_cache, kept_in_memory, std::move(_page_zone_maps_meta)); + return _load_once.call([this, use_page_cache, kept_in_memory, index_load_stats] { + return _load(use_page_cache, kept_in_memory, std::move(_page_zone_maps_meta), + index_load_stats); }); } Status ZoneMapIndexReader::_load(bool use_page_cache, bool kept_in_memory, - std::unique_ptr page_zone_maps_meta) { + std::unique_ptr page_zone_maps_meta, + OlapReaderStatistics* index_load_stats) { IndexedColumnReader reader(_file_reader, *page_zone_maps_meta); - RETURN_IF_ERROR(reader.load(use_page_cache, kept_in_memory)); - IndexedColumnIterator iter(&reader); + RETURN_IF_ERROR(reader.load(use_page_cache, kept_in_memory, index_load_stats)); + IndexedColumnIterator iter(&reader, index_load_stats); _page_zone_maps.resize(reader.num_values()); diff --git a/be/src/olap/rowset/segment_v2/zone_map_index.h b/be/src/olap/rowset/segment_v2/zone_map_index.h index 34869bbbfeea62..04cae12975c5fa 100644 --- a/be/src/olap/rowset/segment_v2/zone_map_index.h +++ b/be/src/olap/rowset/segment_v2/zone_map_index.h @@ -154,14 +154,16 @@ class ZoneMapIndexReader : public MetadataAdder { virtual ~ZoneMapIndexReader(); // load all page zone maps into memory - Status load(bool use_page_cache, bool kept_in_memory); + Status load(bool use_page_cache, bool kept_in_memory, + OlapReaderStatistics* index_load_stats = nullptr); const std::vector& page_zone_maps() const { return _page_zone_maps; } int32_t num_pages() const { return _page_zone_maps.size(); } private: - Status _load(bool use_page_cache, bool kept_in_memory, std::unique_ptr); + Status _load(bool use_page_cache, bool kept_in_memory, std::unique_ptr, + OlapReaderStatistics* index_load_stats); int64_t get_metadata_size() const override; diff --git a/be/test/olap/date_bloom_filter_test.cpp b/be/test/olap/date_bloom_filter_test.cpp index 715301419e228f..51de4ebd8e7452 100644 --- 
a/be/test/olap/date_bloom_filter_test.cpp +++ b/be/test/olap/date_bloom_filter_test.cpp @@ -155,8 +155,8 @@ TEST_F(DateBloomFilterTest, query_index_test) { { const auto& reader = segment->_column_readers[0]; std::unique_ptr bf_iter; - EXPECT_TRUE(reader->_bloom_filter_index->load(true, true).ok()); - EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->load(true, true, nullptr).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter, nullptr).ok()); std::unique_ptr bf; EXPECT_TRUE(bf_iter->read_bloom_filter(0, &bf).ok()); auto test = [&](const std::string& query_string, bool result) { @@ -174,8 +174,8 @@ TEST_F(DateBloomFilterTest, query_index_test) { { const auto& reader = segment->_column_readers[1]; std::unique_ptr bf_iter; - EXPECT_TRUE(reader->_bloom_filter_index->load(true, true).ok()); - EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->load(true, true, nullptr).ok()); + EXPECT_TRUE(reader->_bloom_filter_index->new_iterator(&bf_iter, nullptr).ok()); std::unique_ptr bf; EXPECT_TRUE(bf_iter->read_bloom_filter(0, &bf).ok()); auto test = [&](const std::string& query_string, bool result) { diff --git a/be/test/olap/primary_key_index_test.cpp b/be/test/olap/primary_key_index_test.cpp index 72aae56cd0938f..9407be938867ec 100644 --- a/be/test/olap/primary_key_index_test.cpp +++ b/be/test/olap/primary_key_index_test.cpp @@ -80,12 +80,12 @@ TEST_F(PrimaryKeyIndexTest, builder) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, 
index_reader.num_rows()); std::unique_ptr index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { @@ -142,7 +142,7 @@ TEST_F(PrimaryKeyIndexTest, builder) { int batch_size = 1024; while (remaining > 0) { std::unique_ptr iter; - EXPECT_TRUE(index_reader.new_iterator(&iter).ok()); + EXPECT_TRUE(index_reader.new_iterator(&iter, nullptr).ok()); size_t num_to_read = std::min(batch_size, remaining); auto index_type = vectorized::DataTypeFactory::instance().create_data_type( @@ -199,12 +199,12 @@ TEST_F(PrimaryKeyIndexTest, multiple_pages) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, index_reader.num_rows()); std::unique_ptr index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { @@ -283,12 +283,12 @@ TEST_F(PrimaryKeyIndexTest, single_page) { PrimaryKeyIndexReader index_reader; io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); - EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta).ok()); - EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta).ok()); + EXPECT_TRUE(index_reader.parse_index(file_reader, index_meta, nullptr).ok()); + EXPECT_TRUE(index_reader.parse_bf(file_reader, index_meta, nullptr).ok()); EXPECT_EQ(num_rows, index_reader.num_rows()); std::unique_ptr 
index_iterator; - EXPECT_TRUE(index_reader.new_iterator(&index_iterator).ok()); + EXPECT_TRUE(index_reader.new_iterator(&index_iterator, nullptr).ok()); bool exact_match = false; uint32_t row_id; for (size_t i = 0; i < keys.size(); i++) { diff --git a/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp b/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp index 813952595efcfd..e561f8ce944887 100644 --- a/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp +++ b/be/test/olap/rowset/segment_v2/bloom_filter_index_reader_writer_test.cpp @@ -124,10 +124,10 @@ void get_bloom_filter_reader_iter(const std::string& file_name, const ColumnInde io::FileReaderSPtr file_reader; ASSERT_EQ(io::global_local_filesystem()->open_file(fname, &file_reader), Status::OK()); *reader = new BloomFilterIndexReader(std::move(file_reader), meta.bloom_filter_index()); - auto st = (*reader)->load(true, false); + auto st = (*reader)->load(true, false, nullptr); EXPECT_TRUE(st.ok()); - st = (*reader)->new_iterator(iter); + st = (*reader)->new_iterator(iter, nullptr); EXPECT_TRUE(st.ok()); } diff --git a/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp b/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp index 33848c5959cfaa..ffd9c92ee0272e 100644 --- a/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp +++ b/be/test/olap/rowset/segment_v2/ordinal_page_index_test.cpp @@ -74,7 +74,7 @@ TEST_F(OrdinalPageIndexTest, normal) { io::FileReaderSPtr file_reader; EXPECT_TRUE(fs->open_file(filename, &file_reader).ok()); OrdinalIndexReader index(file_reader, 16 * 1024 * 4096 + 1, index_meta.ordinal_index()); - EXPECT_TRUE(index.load(true, false).ok()); + EXPECT_TRUE(index.load(true, false, nullptr).ok()); EXPECT_EQ(16 * 1024, index.num_data_pages()); EXPECT_EQ(1, index.get_first_ordinal(0)); EXPECT_EQ(4096, index.get_last_ordinal(0)); @@ -128,7 +128,7 @@ TEST_F(OrdinalPageIndexTest, one_data_page) { } OrdinalIndexReader 
index(nullptr, num_values, index_meta.ordinal_index()); - EXPECT_TRUE(index.load(true, false).ok()); + EXPECT_TRUE(index.load(true, false, nullptr).ok()); EXPECT_EQ(1, index.num_data_pages()); EXPECT_EQ(0, index.get_first_ordinal(0)); EXPECT_EQ(num_values - 1, index.get_last_ordinal(0)); diff --git a/be/test/olap/segment_cache_test.cpp b/be/test/olap/segment_cache_test.cpp index b226bc6c2292e5..c527ffddd424b9 100644 --- a/be/test/olap/segment_cache_test.cpp +++ b/be/test/olap/segment_cache_test.cpp @@ -323,7 +323,7 @@ TEST_F(SegmentCacheTest, vec_sequence_col) { segment_v2::SegmentSharedPtr segment_ptr = handle.get_segments()[0]; // load index and bf second - res = segment_ptr->load_pk_index_and_bf(); + res = segment_ptr->load_pk_index_and_bf(nullptr); ASSERT_TRUE(res.ok()); // check cache mem usage equals to segment mem usage From 034085ac4f90f93ea1b80695a020d1da8ebe3f91 Mon Sep 17 00:00:00 2001 From: walter Date: Fri, 20 Dec 2024 22:27:12 +0800 Subject: [PATCH 36/82] [fix](restore) Lock tablet before modify segment files (#45711) There is a race condition between the tablet checkpoint and the snapshot move task since the checkpoint will depend on the segment files to check data size correctness, and the move task will delete the tablet directory and move the downloaded files into it. This PR makes the move task to take tablet locks, before deleting the directory. 
--- be/src/olap/tablet.cpp | 4 +- be/src/olap/tablet.h | 5 +- be/src/runtime/snapshot_loader.cpp | 97 ++++++++++++++++++------------ 3 files changed, 63 insertions(+), 43 deletions(-) diff --git a/be/src/olap/tablet.cpp b/be/src/olap/tablet.cpp index c7919b3f8dca24..1758166e76edee 100644 --- a/be/src/olap/tablet.cpp +++ b/be/src/olap/tablet.cpp @@ -2766,7 +2766,7 @@ void Tablet::check_table_size_correctness() { const std::vector& all_rs_metas = _tablet_meta->all_rs_metas(); for (const auto& rs_meta : all_rs_metas) { int64_t total_segment_size = get_segment_file_size(rs_meta); - int64_t total_inverted_index_size = get_inverted_index_file_szie(rs_meta); + int64_t total_inverted_index_size = get_inverted_index_file_size(rs_meta); if (rs_meta->data_disk_size() != total_segment_size || rs_meta->index_disk_size() != total_inverted_index_size || rs_meta->data_disk_size() + rs_meta->index_disk_size() != rs_meta->total_disk_size()) { @@ -2817,7 +2817,7 @@ int64_t Tablet::get_segment_file_size(const RowsetMetaSharedPtr& rs_meta) { return total_segment_size; } -int64_t Tablet::get_inverted_index_file_szie(const RowsetMetaSharedPtr& rs_meta) { +int64_t Tablet::get_inverted_index_file_size(const RowsetMetaSharedPtr& rs_meta) { const auto& fs = rs_meta->fs(); if (!fs) { LOG(WARNING) << "get fs failed, resource_id={}" << rs_meta->resource_id(); diff --git a/be/src/olap/tablet.h b/be/src/olap/tablet.h index d00476f044191c..afe043bf15195b 100644 --- a/be/src/olap/tablet.h +++ b/be/src/olap/tablet.h @@ -214,6 +214,7 @@ class Tablet final : public BaseTablet { std::mutex& get_push_lock() { return _ingest_lock; } std::mutex& get_base_compaction_lock() { return _base_compaction_lock; } std::mutex& get_cumulative_compaction_lock() { return _cumulative_compaction_lock; } + std::shared_mutex& get_meta_store_lock() { return _meta_store_lock; } std::shared_timed_mutex& get_migration_lock() { return _migration_lock; } @@ -531,7 +532,7 @@ class Tablet final : public BaseTablet { void 
check_table_size_correctness(); std::string get_segment_path(const RowsetMetaSharedPtr& rs_meta, int64_t seg_id); int64_t get_segment_file_size(const RowsetMetaSharedPtr& rs_meta); - int64_t get_inverted_index_file_szie(const RowsetMetaSharedPtr& rs_meta); + int64_t get_inverted_index_file_size(const RowsetMetaSharedPtr& rs_meta); public: static const int64_t K_INVALID_CUMULATIVE_POINT = -1; @@ -588,7 +589,7 @@ class Tablet final : public BaseTablet { std::shared_ptr _cumulative_compaction_policy; std::string_view _cumulative_compaction_type; - // use a seperate thread to check all tablets paths existance + // use a separate thread to check all tablets paths existence std::atomic _is_tablet_path_exists; int64_t _last_missed_version; diff --git a/be/src/runtime/snapshot_loader.cpp b/be/src/runtime/snapshot_loader.cpp index b492a929fca3bf..c5b27c823054a4 100644 --- a/be/src/runtime/snapshot_loader.cpp +++ b/be/src/runtime/snapshot_loader.cpp @@ -765,49 +765,68 @@ Status SnapshotLoader::move(const std::string& snapshot_path, TabletSharedPtr ta return Status::InternalError(err_msg); } - if (overwrite) { - std::vector snapshot_files; - RETURN_IF_ERROR(_get_existing_files_from_local(snapshot_path, &snapshot_files)); - - // 1. simply delete the old dir and replace it with the snapshot dir - try { - // This remove seems soft enough, because we already get - // tablet id and schema hash from this path, which - // means this path is a valid path. - std::filesystem::remove_all(tablet_path); - VLOG_CRITICAL << "remove dir: " << tablet_path; - std::filesystem::create_directory(tablet_path); - VLOG_CRITICAL << "re-create dir: " << tablet_path; - } catch (const std::filesystem::filesystem_error& e) { - std::stringstream ss; - ss << "failed to move tablet path: " << tablet_path << ". 
err: " << e.what(); - LOG(WARNING) << ss.str(); - return Status::InternalError(ss.str()); - } + if (!overwrite) { + throw Exception(Status::FatalError("only support overwrite now")); + } - // link files one by one - // files in snapshot dir will be moved in snapshot clean process - std::vector linked_files; - for (auto& file : snapshot_files) { - auto full_src_path = fmt::format("{}/{}", snapshot_path, file); - auto full_dest_path = fmt::format("{}/{}", tablet_path, file); - if (link(full_src_path.c_str(), full_dest_path.c_str()) != 0) { - LOG(WARNING) << "failed to link file from " << full_src_path << " to " - << full_dest_path << ", err: " << std::strerror(errno); - - // clean the already linked files - for (auto& linked_file : linked_files) { - remove(linked_file.c_str()); - } + // Medium migration/clone/checkpoint/compaction may change or check the + // files and tablet meta, so we need to take these locks. + std::unique_lock migration_lock(tablet->get_migration_lock(), std::try_to_lock); + std::unique_lock base_compact_lock(tablet->get_base_compaction_lock(), std::try_to_lock); + std::unique_lock cumu_compact_lock(tablet->get_cumulative_compaction_lock(), std::try_to_lock); + std::unique_lock cold_compact_lock(tablet->get_cold_compaction_lock(), std::try_to_lock); + std::unique_lock build_idx_lock(tablet->get_build_inverted_index_lock(), std::try_to_lock); + std::unique_lock meta_store_lock(tablet->get_meta_store_lock(), std::try_to_lock); + if (!migration_lock.owns_lock() || !base_compact_lock.owns_lock() || + !cumu_compact_lock.owns_lock() || !cold_compact_lock.owns_lock() || + !build_idx_lock.owns_lock() || !meta_store_lock.owns_lock()) { + // This error should be retryable + auto status = Status::ObtainLockFailed("failed to get tablet locks, tablet: {}", tablet_id); + LOG(WARNING) << status << ", snapshot path: " << snapshot_path + << ", tablet path: " << tablet_path; + return status; + } - return Status::InternalError("move tablet failed"); + std::vector 
snapshot_files; + RETURN_IF_ERROR(_get_existing_files_from_local(snapshot_path, &snapshot_files)); + + // FIXME: the below logic will demage the tablet files if failed in the middle. + + // 1. simply delete the old dir and replace it with the snapshot dir + try { + // This remove seems soft enough, because we already get + // tablet id and schema hash from this path, which + // means this path is a valid path. + std::filesystem::remove_all(tablet_path); + VLOG_CRITICAL << "remove dir: " << tablet_path; + std::filesystem::create_directory(tablet_path); + VLOG_CRITICAL << "re-create dir: " << tablet_path; + } catch (const std::filesystem::filesystem_error& e) { + std::stringstream ss; + ss << "failed to move tablet path: " << tablet_path << ". err: " << e.what(); + LOG(WARNING) << ss.str(); + return Status::InternalError(ss.str()); + } + + // link files one by one + // files in snapshot dir will be moved in snapshot clean process + std::vector linked_files; + for (auto& file : snapshot_files) { + auto full_src_path = fmt::format("{}/{}", snapshot_path, file); + auto full_dest_path = fmt::format("{}/{}", tablet_path, file); + if (link(full_src_path.c_str(), full_dest_path.c_str()) != 0) { + LOG(WARNING) << "failed to link file from " << full_src_path << " to " << full_dest_path + << ", err: " << std::strerror(errno); + + // clean the already linked files + for (auto& linked_file : linked_files) { + remove(linked_file.c_str()); } - linked_files.push_back(full_dest_path); - VLOG_CRITICAL << "link file from " << full_src_path << " to " << full_dest_path; - } - } else { - throw Exception(Status::FatalError("only support overwrite now")); + return Status::InternalError("move tablet failed"); + } + linked_files.push_back(full_dest_path); + VLOG_CRITICAL << "link file from " << full_src_path << " to " << full_dest_path; } // snapshot loader not need to change tablet uid From 0ab4eae228e815b3237e70dab54d67808df634df Mon Sep 17 00:00:00 2001 From: zhangstar333 Date: Fri, 20 
Dec 2024 22:49:46 +0800 Subject: [PATCH 37/82] [Bug](function) fix is_ip_address_in_range function parse error throw exception (#45657) ### What problem does this PR solve? Problem Summary: before: will be throw exception when parse NULL value, as the input is empty invalid. so need check firstly and then parse it. ``` mysql [test]>select * from ip_test; +------+------------------+ | id | data | +------+------------------+ | 54 | 2001:db8:4::/128 | | 55 | NULL | +------+------------------+ 2 rows in set (0.07 sec) mysql [test]>SELECT data, IS_IP_ADDRESS_IN_RANGE(CAST('0.0.0.1' AS STRING), data) FROM ip_test; ERROR 1105 (HY000): errCode = 2, detailMessage = (10.16.10.8)[INVALID_ARGUMENT][E33] The text does not contain '/': ``` --- be/src/vec/functions/function_ip.h | 23 +++++++++++++------ .../test_is_ip_address_in_range_function.out | 8 +++++++ ...est_is_ip_address_in_range_function.groovy | 9 ++++++++ 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/be/src/vec/functions/function_ip.h b/be/src/vec/functions/function_ip.h index 67edad5015aeaf..1a1c23e2b06c35 100644 --- a/be/src/vec/functions/function_ip.h +++ b/be/src/vec/functions/function_ip.h @@ -615,8 +615,13 @@ class FunctionIsIPAddressInRange : public IFunction { for (size_t i = 0; i < input_rows_count; ++i) { auto addr_idx = index_check_const(i, addr_const); auto cidr_idx = index_check_const(i, cidr_const); - const auto cidr = - parse_ip_with_cidr(str_cidr_column->get_data_at(cidr_idx).to_string_view()); + auto cidr_data = str_cidr_column->get_data_at(cidr_idx); + // cidr_data maybe NULL, But the input column is nested column, so check here avoid throw exception + if (cidr_data.data == nullptr || cidr_data.size == 0) { + col_res_data[i] = 0; + continue; + } + const auto cidr = parse_ip_with_cidr(cidr_data.to_string_view()); if constexpr (PT == PrimitiveType::TYPE_IPV4) { if (cidr._address.as_v4()) { col_res_data[i] = match_ipv4_subnet(ip_data[addr_idx], cidr._address.as_v4(), @@ -775,11 +780,15 
@@ class FunctionIsIPAddressInRange : public IFunction { for (size_t i = 0; i < input_rows_count; ++i) { auto addr_idx = index_check_const(i, addr_const); auto cidr_idx = index_check_const(i, cidr_const); - - const auto addr = - IPAddressVariant(str_addr_column->get_data_at(addr_idx).to_string_view()); - const auto cidr = - parse_ip_with_cidr(str_cidr_column->get_data_at(cidr_idx).to_string_view()); + auto addr_data = str_addr_column->get_data_at(addr_idx); + auto cidr_data = str_cidr_column->get_data_at(cidr_idx); + // cidr_data maybe NULL, But the input column is nested column, so check here avoid throw exception + if (cidr_data.data == nullptr || cidr_data.size == 0) { + col_res_data[i] = 0; + continue; + } + const auto addr = IPAddressVariant(addr_data.to_string_view()); + const auto cidr = parse_ip_with_cidr(cidr_data.to_string_view()); col_res_data[i] = is_address_in_range(addr, cidr) ? 1 : 0; } } diff --git a/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out b/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out index 285b861b742c5b..759b6c890ea13e 100644 --- a/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out +++ b/regression-test/data/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.out @@ -92,3 +92,11 @@ -- !sql -- \N +-- !sql1 -- +54 2001:db8:4::/128 +55 \N + +-- !sql2 -- +\N \N +2001:db8:4::/128 false + diff --git a/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy b/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy index 812bfffeb2f3e0..cee47c818130a7 100644 --- a/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy +++ b/regression-test/suites/query_p0/sql_functions/ip_functions/test_is_ip_address_in_range_function.groovy @@ -78,4 
+78,13 @@ suite("test_is_ip_address_in_range_function") { qt_sql "SELECT is_ip_address_in_range(NULL, '::ffff:192.168.0.4/128')" qt_sql "SELECT is_ip_address_in_range(NULL, NULL)" + + + sql """ DROP TABLE IF EXISTS ip_test """ + sql """ CREATE TABLE IF NOT EXISTS ip_test(id INT, data string) DISTRIBUTED BY HASH(id) BUCKETS 1 PROPERTIES ('replication_num' = '1');""" + sql """ INSERT INTO ip_test values (54, '2001:db8:4::/128'); """ + sql """ INSERT INTO ip_test values (55, NULL); """ + qt_sql1 """ select * from ip_test order by 1; """ + qt_sql2 "SELECT data, IS_IP_ADDRESS_IN_RANGE(CAST('0.0.0.1' AS STRING), data) FROM ip_test order by 1;" + } \ No newline at end of file From f6071a3833bb095f01065a9d10bc828f59cac883 Mon Sep 17 00:00:00 2001 From: feiniaofeiafei Date: Sat, 21 Dec 2024 18:00:07 +0800 Subject: [PATCH 38/82] [enhance](nereids) date_add, date_sub, date_diff, date_floor, date_ceil function implement Monotonic (#44943) date_add, date_sub, date_diff, date_floor, date_ceil function implement Monotonic, so we can do prune range partition for this functions, for example `date_add(dt, 1) = '2024-01-01'` --- .../rules/OneRangePartitionEvaluator.java | 15 +- .../functions/DateAddSubMonotonic.java | 38 ++ .../functions/DateCeilFloorMonotonic.java | 47 ++ .../functions/DateDiffMonotonic.java | 39 ++ .../expressions/functions/Monotonic.java | 5 + .../expressions/functions/scalar/DayCeil.java | 17 +- .../functions/scalar/DayFloor.java | 17 +- .../expressions/functions/scalar/DaysAdd.java | 8 +- .../functions/scalar/DaysDiff.java | 13 +- .../expressions/functions/scalar/DaysSub.java | 8 +- .../functions/scalar/FromDays.java | 18 +- .../functions/scalar/HourCeil.java | 17 +- .../functions/scalar/HourFloor.java | 17 +- .../functions/scalar/HoursAdd.java | 9 +- .../functions/scalar/HoursDiff.java | 13 +- .../functions/scalar/HoursSub.java | 9 +- .../functions/scalar/MicroSecondsAdd.java | 9 +- .../functions/scalar/MicroSecondsDiff.java | 13 +- 
.../functions/scalar/MicroSecondsSub.java | 9 +- .../functions/scalar/MilliSecondsAdd.java | 9 +- .../functions/scalar/MilliSecondsDiff.java | 13 +- .../functions/scalar/MilliSecondsSub.java | 9 +- .../functions/scalar/MinuteCeil.java | 17 +- .../functions/scalar/MinuteFloor.java | 17 +- .../functions/scalar/MinutesAdd.java | 9 +- .../functions/scalar/MinutesDiff.java | 13 +- .../functions/scalar/MinutesSub.java | 9 +- .../functions/scalar/MonthCeil.java | 17 +- .../functions/scalar/MonthFloor.java | 17 +- .../functions/scalar/MonthsAdd.java | 8 +- .../functions/scalar/MonthsDiff.java | 13 +- .../functions/scalar/MonthsSub.java | 8 +- .../functions/scalar/SecondCeil.java | 17 +- .../functions/scalar/SecondFloor.java | 17 +- .../functions/scalar/SecondsAdd.java | 9 +- .../functions/scalar/SecondsDiff.java | 13 +- .../functions/scalar/SecondsSub.java | 9 +- .../functions/scalar/UnixTimestamp.java | 39 +- .../functions/scalar/YearCeil.java | 17 +- .../functions/scalar/YearFloor.java | 17 +- .../functions/scalar/YearsAdd.java | 8 +- .../functions/scalar/YearsDiff.java | 13 +- .../functions/scalar/YearsSub.java | 8 +- .../expressions/literal/DateTimeLiteral.java | 5 +- .../test_add_sub_diff_ceil_floor.groovy | 407 ++++++++++++++++++ .../partition_prune/test_convert_tz.groovy | 6 +- 46 files changed, 1019 insertions(+), 46 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java create mode 100644 regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java index 7e91d5502f7fad..eb9fd6e149160b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/OneRangePartitionEvaluator.java @@ -48,6 +48,7 @@ import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.literal.MaxLiteral; +import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BooleanType; import org.apache.doris.nereids.util.ExpressionUtils; @@ -807,22 +808,29 @@ private EvaluateRangeResult computeMonotonicFunctionRange(EvaluateRangeResult re : new NonNullable(funcChild)); partitionSlotContainsNull.put((Expression) func, withNullable.nullable()); - if (!result.childrenResult.get(0).columnRanges.containsKey(funcChild)) { + if (!result.childrenResult.get(childIndex).columnRanges.containsKey(funcChild)) { return result; } - ColumnRange childRange = result.childrenResult.get(0).columnRanges.get(funcChild); + ColumnRange childRange = result.childrenResult.get(childIndex).columnRanges.get(funcChild); if (childRange.isEmptyRange() || childRange.asRanges().size() != 1 || (!childRange.span().hasLowerBound() && !childRange.span().hasUpperBound())) { return result; } Range span = childRange.span(); + // null means positive infinity or negative infinity Literal lower = span.hasLowerBound() ? span.lowerEndpoint().getValue() : null; Literal upper = span.hasUpperBound() && !(span.upperEndpoint().getValue() instanceof MaxLiteral) ? span.upperEndpoint().getValue() : null; + if (!func.isMonotonic(lower, upper)) { + return result; + } Expression lowerValue = lower != null ? 
FoldConstantRuleOnFE.evaluate(func.withConstantArgs(lower), expressionRewriteContext) : null; Expression upperValue = upper != null ? FoldConstantRuleOnFE.evaluate(func.withConstantArgs(upper), expressionRewriteContext) : null; + if (lowerValue instanceof NullLiteral || upperValue instanceof NullLiteral) { + return result; + } if (!func.isPositive()) { Expression temp = lowerValue; lowerValue = upperValue; @@ -842,6 +850,9 @@ private EvaluateRangeResult computeMonotonicFunctionRange(EvaluateRangeResult re if (upperValue instanceof Literal) { newRange = newRange.withUpperBound((Literal) upperValue); } + if (newRange.isEmptyRange() || !newRange.span().hasLowerBound() && !newRange.span().hasUpperBound()) { + return result; + } context.rangeMap.put((Expression) func, newRange); newRanges.put((Expression) func, newRange); return new EvaluateRangeResult((Expression) func, newRanges, result.childrenResult); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java new file mode 100644 index 00000000000000..7fec22fd9d317d --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateAddSubMonotonic.java @@ -0,0 +1,38 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity for XX_ADD XX_SUB */ +public interface DateAddSubMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + return child(1) instanceof Literal; + } + + @Override + default boolean isPositive() { + return true; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return 0; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java new file mode 100644 index 00000000000000..71ad80a347176a --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateCeilFloorMonotonic.java @@ -0,0 +1,47 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity of XX_CEIL and XX_FLOOR */ +public interface DateCeilFloorMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + switch (arity()) { + case 1: + return true; + case 2: + return !(child(0) instanceof Literal) && child(1) instanceof Literal; + case 3: + return !(child(0) instanceof Literal) && child(1) instanceof Literal && child(2) instanceof Literal; + default: + return false; + } + } + + @Override + default boolean isPositive() { + return true; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return 0; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java new file mode 100644 index 00000000000000..daaea895b6d15b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/DateDiffMonotonic.java @@ -0,0 +1,39 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.expressions.functions; + +import org.apache.doris.nereids.trees.expressions.literal.Literal; + +/** monotonicity for XX_DIFF */ +public interface DateDiffMonotonic extends Monotonic { + @Override + default boolean isMonotonic(Literal lower, Literal upper) { + return !(child(0) instanceof Literal) && child(1) instanceof Literal + || child(0) instanceof Literal && !(child(1) instanceof Literal); + } + + @Override + default boolean isPositive() { + return child(1) instanceof Literal; + } + + @Override + default int getMonotonicFunctionChildIndex() { + return child(1) instanceof Literal ? 
0 : 1; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java index bcaa040cb2a650..feec5933890e67 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/Monotonic.java @@ -18,9 +18,14 @@ package org.apache.doris.nereids.trees.expressions.functions; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.literal.Literal; /** monotonicity of expressions */ public interface Monotonic extends ExpressionTrait { + default boolean isMonotonic(Literal lower, Literal upper) { + return true; + } + // true means that the function is an increasing function boolean isPositive(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java index e77c307b523869..740363b50aad2b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'day_ceil'. 
This class is generated by GenerateFunction. */ public class DayCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDayCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new DayCeil(literal); + case 2: + return new DayCeil(literal, child(1)); + case 3: + return new DayCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java index b7e04e3a374629..5ba7fc13c7526b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DayFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'day_floor'. 
This class is generated by GenerateFunction. */ public class DayFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDayFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new DayFloor(literal); + case 2: + return new DayFloor(literal, child(1)); + case 3: + return new DayFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java index e02c20eee82a04..a231816a330eff 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class 
DaysAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() private static final List SIGNATURES = Config.enable_date_conversion ? ImmutableList.of( @@ -77,4 +78,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new DaysAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java index e0343f1148f162..c6f3377fdc99c5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'days_diff'. This class is generated by GenerateFunction. 
*/ public class DaysDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new DaysDiff(literal, child(1)); + } else { + return new DaysDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java index 8d135dc6c9cf72..5dab58ecdf63c5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/DaysSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class DaysSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, 
PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() private static final List SIGNATURES = Config.enable_date_conversion ? ImmutableList.of( @@ -77,4 +78,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitDaysSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new DaysSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java index c3d19588ce5f2f..1799c1461fc1b4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/FromDays.java @@ -21,6 +21,7 @@ import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; +import org.apache.doris.nereids.trees.expressions.functions.Monotonic; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.shape.UnaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'from_days'. This class is generated by GenerateFunction. 
*/ public class FromDays extends ScalarFunction - implements UnaryExpression, ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements UnaryExpression, ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, Monotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateV2Type.INSTANCE).args(IntegerType.INSTANCE) @@ -67,4 +68,19 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitFromDays(this, context); } + + @Override + public boolean isPositive() { + return true; + } + + @Override + public int getMonotonicFunctionChildIndex() { + return 0; + } + + @Override + public Expression withConstantArgs(Expression literal) { + return new FromDays(literal); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java index e76151ef9d682c..13358e18d74770 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'hour_ceil'. This class is generated by GenerateFunction. 
*/ public class HourCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHourCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new HourCeil(literal); + case 2: + return new HourCeil(literal, child(1)); + case 3: + return new HourCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java index 567f0a2dd188da..f48e26ea443f25 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HourFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'hour_floor'. 
This class is generated by GenerateFunction. */ public class HourFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHourFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new HourFloor(literal); + case 2: + return new HourFloor(literal, child(1)); + case 3: + return new HourFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java index cb4e601b14d001..4c10b204597975 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'days_add'. 
*/ public class HoursAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new HoursAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java index 452e3110ff764d..63942cd56e9e5b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'hours_diff'. This class is generated by GenerateFunction. 
*/ public class HoursDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new HoursDiff(literal, child(1)); + } else { + return new HoursDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java index e53f8ecb90528a..49e8e5cb50f69a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/HoursSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'hours_sub'. 
*/ public class HoursSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { public static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitHoursSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new HoursSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java index 8d792259440dd2..33dd5809f2b1a8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MicroSeconds_add'. 
*/ public class MicroSecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MicroSecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java index 8bf3a9648396d7..160e8a96b13e68 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -35,7 +37,7 @@ * ScalarFunction 'microseconds_diff'. This class is generated by GenerateFunction. 
*/ public class MicroSecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -67,4 +69,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MicroSecondsDiff(literal, child(1)); + } else { + return new MicroSecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java index 2894d1fffc902f..20c880fb879298 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MicroSecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MicroSeconds_sub'. 
*/ public class MicroSecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMicroSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MicroSecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java index 1cb56b13f84ed5..244b661db3afed 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MilliSeconds_add'. 
*/ public class MilliSecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MilliSecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java index 4500bd69460d98..0e8c623ce176b2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -35,7 +37,7 @@ * ScalarFunction 'milliseconds_diff'. This class is generated by GenerateFunction. 
*/ public class MilliSecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -67,4 +69,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MilliSecondsDiff(literal, child(1)); + } else { + return new MilliSecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java index 42891b7e7e0b22..10b4f8184d12d8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MilliSecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -35,7 +36,8 @@ * ScalarFunction 'MilliSeconds_sub'. 
*/ public class MilliSecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.MAX) @@ -66,4 +68,9 @@ public FunctionSignature computeSignature(FunctionSignature signature) { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMilliSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MilliSecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java index b00eaff07c2c62..4f3e317d00ef39 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -35,7 +36,7 @@ * ScalarFunction 'minute_ceil'. This class is generated by GenerateFunction. 
*/ public class MinuteCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -99,4 +100,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinuteCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MinuteCeil(literal); + case 2: + return new MinuteCeil(literal, child(1)); + case 3: + return new MinuteCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java index 683acc3a48381c..cefb5222c764ad 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinuteFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -35,7 +36,7 @@ * ScalarFunction 'minute_floor'. This class is generated by GenerateFunction. 
*/ public class MinuteFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -99,4 +100,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinuteFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MinuteFloor(literal); + case 2: + return new MinuteFloor(literal, child(1)); + case 3: + return new MinuteFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java index f4c02fb84ca3d6..8ba1642f6248b6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_add'. 
*/ public class MinutesAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MinutesAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java index 4d011116334bf4..91c254be7c14dc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'minutes_diff'. This class is generated by GenerateFunction. 
*/ public class MinutesDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MinutesDiff(literal, child(1)); + } else { + return new MinutesDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java index 4fb616957813a7..2a29d9e1659963 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MinutesSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_sub'. 
*/ public class MinutesSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMinutesSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MinutesSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java index 627568cf28a145..a1f4628dd6a9ab 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'month_ceil'. This class is generated by GenerateFunction. 
*/ public class MonthCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MonthCeil(literal); + case 2: + return new MonthCeil(literal, child(1)); + case 3: + return new MonthCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java index f3b0b66c1396d1..d55d52ab68ce5a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -37,7 +38,7 @@ * ScalarFunction 'month_floor'. 
This class is generated by GenerateFunction. */ public class MonthFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -106,4 +107,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new MonthFloor(literal); + case 2: + return new MonthFloor(literal, child(1)); + case 3: + return new MonthFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java index 5126400b71efaa..1cca6d8446fef6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ 
*/ public class MonthsAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MonthsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java index 373265b1e3822a..a850767a49fa40 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'months_diff'. This class is generated by GenerateFunction. 
*/ public class MonthsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE).args(DateV2Type.INSTANCE, DateV2Type.INSTANCE), @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new MonthsDiff(literal, child(1)); + } else { + return new MonthsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java index 1c2985a6e136e2..9c5824a1b9eebb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/MonthsSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class MonthsSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, 
PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitMonthsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new MonthsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java index 04cd08f4c8ce7b..3b0e657698743a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'second_ceil'. This class is generated by GenerateFunction. 
*/ public class SecondCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -100,4 +101,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new SecondCeil(literal); + case 2: + return new SecondCeil(literal, child(1)); + case 3: + return new SecondCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java index ae0af1106073ac..c06fff06aed016 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; @@ -36,7 +37,7 @@ * ScalarFunction 'second_floor'. 
This class is generated by GenerateFunction. */ public class SecondFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral { + implements ExplicitlyCastableSignature, AlwaysNullable, PropagateNullLiteral, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -101,4 +102,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new SecondFloor(literal); + case 2: + return new SecondFloor(literal, child(1)); + case 3: + return new SecondFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java index a6e131f5263537..3afa8f134193ec 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsAdd.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'minutes_add'. 
*/ public class SecondsAdd extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new SecondsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java index 4dd7e12b9e2f32..c81999d4fa2c98 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'seconds_diff'. This class is generated by GenerateFunction. 
*/ public class SecondsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE) @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new SecondsDiff(literal, child(1)); + } else { + return new SecondsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java index 37c59b2168bda2..d3093f84e1a2dc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/SecondsSub.java @@ -19,6 +19,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -38,7 +39,8 @@ * ScalarFunction 'Seconds_sub'. 
*/ public class SecondsSub extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, + DateAddSubMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT) @@ -67,4 +69,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitSecondsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new SecondsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java index 633e1e7d4f3bda..178187ad9cbc65 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/UnixTimestamp.java @@ -20,6 +20,9 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; +import org.apache.doris.nereids.trees.expressions.functions.Monotonic; +import org.apache.doris.nereids.trees.expressions.literal.DateTimeLiteral; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.types.DateTimeType; @@ -39,7 +42,8 @@ /** * ScalarFunction 'unix_timestamp'. This class is generated by GenerateFunction. 
*/ -public class UnixTimestamp extends ScalarFunction implements ExplicitlyCastableSignature { +public class UnixTimestamp extends ScalarFunction implements ExplicitlyCastableSignature, Monotonic { + private static final DateTimeLiteral MAX = new DateTimeLiteral("2038-01-19 03:14:07"); // we got changes when computeSignature private static final List SIGNATURES = ImmutableList.of( @@ -145,4 +149,37 @@ public R accept(ExpressionVisitor visitor, C context) { public boolean isDeterministic() { return !this.children.isEmpty(); } + + @Override + public boolean isPositive() { + return true; + } + + @Override + public int getMonotonicFunctionChildIndex() { + return 0; + } + + @Override + public Expression withConstantArgs(Expression literal) { + return new UnixTimestamp(literal); + } + + @Override + public boolean isMonotonic(Literal lower, Literal upper) { + if (arity() != 1) { + return false; + } + if (null == lower) { + lower = DateTimeLiteral.MIN_DATETIME; + } + if (null == upper) { + upper = DateTimeLiteral.MAX_DATETIME; + } + if (lower.compareTo(MAX) <= 0 && upper.compareTo(MAX) > 0) { + return false; + } else { + return true; + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java index bc294638be99cd..37c952af815d86 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearCeil.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -36,7 +37,7 @@ * ScalarFunction 'year_ceil'. This class is generated by GenerateFunction. */ public class YearCeil extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -105,4 +106,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearCeil(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new YearCeil(literal); + case 2: + return new YearCeil(literal, child(1)); + case 3: + return new YearCeil(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java index 5415502a769579..00a1ad918f7ffa 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearFloor.java @@ -20,6 +20,7 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.AlwaysNullable; +import org.apache.doris.nereids.trees.expressions.functions.DateCeilFloorMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.DateTimeType; @@ -36,7 +37,7 @@ * ScalarFunction 'year_floor'. This class is generated by GenerateFunction. */ public class YearFloor extends ScalarFunction - implements ExplicitlyCastableSignature, AlwaysNullable { + implements ExplicitlyCastableSignature, AlwaysNullable, DateCeilFloorMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(DateTimeV2Type.SYSTEM_DEFAULT).args(DateTimeV2Type.SYSTEM_DEFAULT), @@ -105,4 +106,18 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearFloor(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + switch (arity()) { + case 1: + return new YearFloor(literal); + case 2: + return new YearFloor(literal, child(1)); + case 3: + return new YearFloor(literal, child(1), child(2)); + default: + throw new IllegalStateException("The function " + getName() + " has invalid child number."); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java index 33c9e1c6dfa5e8..9b81378d9871bc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsAdd.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import 
org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class YearsAdd extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsAdd(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new YearsAdd(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java index e217d8da72902a..61b637449f1797 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsDiff.java @@ -19,8 +19,10 @@ import org.apache.doris.catalog.FunctionSignature; import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.functions.DateDiffMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; +import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; import 
org.apache.doris.nereids.trees.expressions.visitor.ExpressionVisitor; import org.apache.doris.nereids.types.BigIntType; @@ -37,7 +39,7 @@ * ScalarFunction 'years_diff'. This class is generated by GenerateFunction. */ public class YearsDiff extends ScalarFunction - implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args { + implements BinaryExpression, ExplicitlyCastableSignature, PropagateNullableOnDateLikeV2Args, DateDiffMonotonic { private static final List SIGNATURES = ImmutableList.of( FunctionSignature.ret(BigIntType.INSTANCE).args(DateV2Type.INSTANCE, DateV2Type.INSTANCE), @@ -73,4 +75,13 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsDiff(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + if (child(1) instanceof Literal) { + return new YearsDiff(literal, child(1)); + } else { + return new YearsDiff(child(0), literal); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java index b70444178df508..6f46727d937a28 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/YearsSub.java @@ -21,6 +21,7 @@ import org.apache.doris.common.Config; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.functions.ComputeSignatureForDateArithmetic; +import org.apache.doris.nereids.trees.expressions.functions.DateAddSubMonotonic; import org.apache.doris.nereids.trees.expressions.functions.ExplicitlyCastableSignature; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullableOnDateLikeV2Args; import 
org.apache.doris.nereids.trees.expressions.shape.BinaryExpression; @@ -41,7 +42,7 @@ */ public class YearsSub extends ScalarFunction implements BinaryExpression, ExplicitlyCastableSignature, - ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args { + ComputeSignatureForDateArithmetic, PropagateNullableOnDateLikeV2Args, DateAddSubMonotonic { // When enable_date_conversion is true, we prefer to V2 signature. // This preference follows original planner. refer to ScalarType.getDefaultDateType() @@ -78,4 +79,9 @@ public List getSignatures() { public R accept(ExpressionVisitor visitor, C context) { return visitor.visitYearsSub(this, context); } + + @Override + public Expression withConstantArgs(Expression literal) { + return new YearsSub(literal, child(1)); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java index 27470187eae0d2..0a5c02409c113a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java @@ -43,11 +43,10 @@ * date time literal. 
*/ public class DateTimeLiteral extends DateLiteral { + public static final DateTimeLiteral MIN_DATETIME = new DateTimeLiteral(0000, 1, 1, 0, 0, 0); + public static final DateTimeLiteral MAX_DATETIME = new DateTimeLiteral(9999, 12, 31, 23, 59, 59); protected static final int MAX_MICROSECOND = 999999; - private static final DateTimeLiteral MIN_DATETIME = new DateTimeLiteral(0000, 1, 1, 0, 0, 0); - private static final DateTimeLiteral MAX_DATETIME = new DateTimeLiteral(9999, 12, 31, 23, 59, 59); - private static final Logger LOG = LogManager.getLogger(DateTimeLiteral.class); protected long hour; diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy new file mode 100644 index 00000000000000..bda9dc81af7cfe --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_add_sub_diff_ceil_floor.groovy @@ -0,0 +1,407 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_add_sub_diff_ceil_floor") { + sql "set disable_nereids_rules='REWRITE_FILTER_EXPRESSION'" + sql "drop table if exists test_add_sub_diff_ceil_floor_t" + sql """create table test_add_sub_diff_ceil_floor_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("2017-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2019-01-01"), + partition p4 values less than ("2020-01-01"), + partition p5 values less than ("2021-01-01") + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO test_add_sub_diff_ceil_floor_t SELECT number, + date_add('2016-01-01 00:00:00', interval number month), + cast(date_add('2022-01-01 00:00:00', interval number month) as date), cast(number as varchar(65533)) FROM numbers('number'='55');""" + sql "INSERT INTO test_add_sub_diff_ceil_floor_t values(3,null,null,null);" + + // xx_add + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_add(dt,1) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_add(dt,2) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_add(dt,1) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_add(dt,2) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_add(dt,2) >'2019-01-01' """ + 
contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_add(dt,10) >'2019-01-01' """ + contains("partitions=3/5 (p3,p4,p5)") + } + // xx_sub + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_sub(dt,1) <='2018-01-01' """ + contains("4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_sub(dt,2) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_sub(dt,10) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_sub(dt,1) <='2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_sub(dt,2) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_sub(dt,10) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_sub(dt,2) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_sub(dt,10) <= '2018-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + + // xx_diff + // first arg is dt. 
positive + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff(dt,'2017-01-01') <2 """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_diff(dt,'2017-01-01') <2 """ + contains("partitions=2/5 (p1,p2)") + } + // second arg is dt. 
not positive + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',dt) <2 """ + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minutes_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where seconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where milliseconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where microseconds_diff('2021-01-01',dt) <2 """ + contains("partitions=1/5 (p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',dt) <=2 """ + contains("partitions=3/5 (p3,p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff('2020-01-01',dt) >2 """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where days_diff('2020-01-01',dt) >=2 """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + + // xx_ceil + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_ceil(dt) <'2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * 
from test_add_sub_diff_ceil_floor_t where day_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minute_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where second_ceil(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + // xx_ceil with other args + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,5) <'2019-01-01' """ + contains("partitions=1/5 (p1)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,'2013-01-01') <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_ceil(dt,5,'2013-01-01') <'2019-01-01'""" + contains(" partitions=3/5 (p1,p2,p3)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_ceil(dt,c) <'2019-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // xx_floor + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where year_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt) <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where day_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where minute_floor(dt) <='2019-01-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + 
} + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where second_floor(dt) <'2019-01-01' """ + contains("partitions=3/5 (p1,p2,p3)") + } + + // xx_floor with other args + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,'2015-01-01') <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,5,'2015-01-01') <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where month_floor(dt,5) <='2019-02-01' """ + contains("partitions=4/5 (p1,p2,p3,p4)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hour_floor(dt,c,'2015-01-01') <='2019-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // diff nest function + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1))) <=2 """ + contains("partitions=4/5 (p2,p3,p4,p5)") + } + explain { + sql "select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2" + contains("partitions=4/5 (p1,p3,p4,p5)") + } + // mixed with non-function predicates + explain { + sql "select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2 and dt>'2019-06-01'" + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where years_diff('2021-01-01',month_ceil(hours_sub(dt, 1))) <=2 and date_trunc(dt,'day')>'2019-06-01' """ + contains("partitions=2/5 (p4,p5)") + } + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where months_diff(months_add(dt,10), '2018-01-01') =2 """ + contains("partitions=1/5 (p2)") + } + + // hours_add second arg is not literal, so will not do pruning + explain { + sql """select * from test_add_sub_diff_ceil_floor_t where hours_add(dt, 
years_diff(dt,'2018-01-01')) <'2018-01-01' """ + contains("partitions=5/5 (p1,p2,p3,p4,p5)") + } + + // max + sql "drop table if exists max_t" + sql """create table max_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("2017-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2019-01-01"), + partition p4 values less than ("2020-01-01"), + partition p5 values less than ("2021-01-01"), + partition p6 values less than MAXVALUE + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO max_t SELECT number, + date_add('2016-01-01 00:00:00', interval number month), + cast(date_add('2022-01-01 00:00:00', interval number month) as date), cast(number as varchar(65533)) FROM numbers('number'='100');""" + sql "INSERT INTO max_t values(3,null,null,null);" + + explain { + sql "select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),'1990-01-05')) <=2 ;" + contains("partitions=5/6 (p2,p3,p4,p5,p6)") + } + explain { + sql "select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),10,'1990-01-05')) <=2 ;" + contains("partitions=5/6 (p2,p3,p4,p5,p6)") + } + + explain { + sql """select * from max_t where years_diff('2021-01-01',month_ceil(hours_add(dt, 1),10,'1990-01-05')) <=2 and dt >'2018-01-01';""" + contains("partitions=4/6 (p3,p4,p5,p6)") + } + + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),10,'1990-01-05')) <=2;""" + contains("partitions=3/6 (p1,p5,p6)") + } + + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),12,'1000-01-01')) > 2""" + contains("partitions=5/6 (p1,p2,p3,p4,p5)") + } + explain { + sql """select * from max_t where months_diff('2021-01-01',month_floor(hours_add(dt, 1),12,'1000-01-01')) > 2 and month_floor(dt) >'2018-01-01' """ + contains("partitions=3/6 (p3,p4,p5)") + } + explain { 
+ sql """select * from max_t where hours_sub(hours_add(dt, 1),1) >'2018-01-01' and days_diff(hours_sub(hours_add(dt, 1),1),'2021-01-01') >2""" + contains("partitions=1/6 (p6)") + } + + // from_days and unix_timestamp + explain { + sql """select * from max_t where unix_timestamp(dt) > 1547838847 """ + contains("partitions=3/6 (p4,p5,p6)") + } + + sql "drop table if exists partition_int_from_days" + sql """ + CREATE TABLE `partition_int_from_days` ( + `a` int NULL, + `b` int NULL + ) ENGINE=OLAP + DUPLICATE KEY(`a`, `b`) + PARTITION BY RANGE(`a`) + (PARTITION p1 VALUES [("-2147483648"), ("100000")), + PARTITION p2 VALUES [("100000"), ("738000")), + PARTITION p3 VALUES [("738000"), ("90000000")), + PARTITION p4 VALUES [("90000000"), (MAXVALUE))) + DISTRIBUTED BY HASH(`a`) BUCKETS 10 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1" + ); """ + sql """ + insert into partition_int_from_days values(100,100),(100022,1002),(738004,33),(90000003,89); + """ + explain { + sql """select * from partition_int_from_days where from_days(a)>'2020-07-29' """ + contains("partitions=3/4 (p1,p3,p4)") + } + + + sql "drop table if exists unix_time_t" + sql """create table unix_time_t (a int, dt datetime, d date, c varchar(100)) duplicate key(a) + partition by range(dt) ( + partition p1 values less than ("1980-01-01"), + partition p2 values less than ("2018-01-01"), + partition p3 values less than ("2039-01-01"), + partition p4 values less than MAXVALUE + ) distributed by hash(a) properties("replication_num"="1");""" + sql """INSERT INTO unix_time_t values(1,'1979-01-01','1979-01-01','abc'),(1,'2012-01-01','2012-01-01','abc'),(1,'2020-01-01','2020-01-01','abc'),(1,'2045-01-01','2045-01-01','abc')""" + sql "INSERT INTO unix_time_t values(3,null,null,null);" + explain { + sql """ select * from unix_time_t where unix_timestamp(dt) > 1514822400 """ + contains("partitions=2/4 (p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) < 
2147454847;""" + contains("partitions=4/4 (p1,p2,p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) = 2147454847""" + contains("partitions=2/4 (p3,p4)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) = 2147454847 and dt<'2038-01-01'""" + contains("partitions=1/4 (p3)") + } + explain { + sql """select * from unix_time_t where unix_timestamp(dt) <=0""" + contains("partitions=3/4 (p1,p3,p4)") + } + +} \ No newline at end of file diff --git a/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy b/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy index c309d10d067194..3e033a78eb963c 100644 --- a/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy +++ b/regression-test/suites/nereids_rules_p0/partition_prune/test_convert_tz.groovy @@ -46,13 +46,13 @@ suite("test_convert_tz") { } explain { sql "SELECT * FROM test_convert_tz WHERE convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') > '2021-01-01';"; - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } explain { sql """SELECT * FROM test_convert_tz WHERE convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') < '2021-02-24' and convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') > '2021-01-01';""" - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } explain { @@ -93,7 +93,7 @@ suite("test_convert_tz") { } explain { sql "SELECT * FROM test_convert_tz WHERE not convert_tz(timestamp, 'Asia/Shanghai', 'Europe/Paris') <= '2021-01-01';"; - contains("partitions=2/3 (p2,p3)") + contains("partitions=3/3 (p1,p2,p3)") } } } \ No newline at end of file From 82d021b80151140faaa51b02369506644379b043 Mon Sep 17 00:00:00 2001 From: "Mingyu Chen (Rayner)" Date: Sun, 22 Dec 2024 19:02:25 +0800 Subject: [PATCH 39/82] [fix](catalog) fix npe after replaying the external catalog (#45756) ### What problem does this PR solve? 
Related PR: #45433 Problem Summary: the `confLock` should be created after replaying in `gsonPostProcess()` of `ExternalCatalog`, or it will be null. --- .../doris/datasource/ExternalCatalog.java | 3 +- .../doris/datasource/ExternalCatalogTest.java | 40 +++++++++++++++++-- 2 files changed, 38 insertions(+), 5 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java index 2575169f79207f..d1df51177fd496 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/ExternalCatalog.java @@ -154,7 +154,7 @@ public abstract class ExternalCatalog protected PreExecutionAuthenticator preExecutionAuthenticator; private volatile Configuration cachedConf = null; - private final byte[] confLock = new byte[0]; + private byte[] confLock = new byte[0]; public ExternalCatalog() { } @@ -784,6 +784,7 @@ public void gsonPostProcess() throws IOException { } } this.propLock = new byte[0]; + this.confLock = new byte[0]; this.initialized = false; setDefaultPropsIfMissing(true); if (tableAutoAnalyzePolicy == null) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java index 43348ca8a0e6ef..f8e72c366b55f7 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/ExternalCatalogTest.java @@ -22,9 +22,10 @@ import org.apache.doris.catalog.Env; import org.apache.doris.catalog.PrimitiveType; import org.apache.doris.common.FeConstants; +import org.apache.doris.common.FeMetaVersion; import org.apache.doris.datasource.hive.HMSExternalCatalog; import org.apache.doris.datasource.test.TestExternalCatalog; -import org.apache.doris.mysql.privilege.Auth; +import org.apache.doris.meta.MetaContext; 
import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.QueryState.MysqlStateType; import org.apache.doris.qe.StmtExecutor; @@ -32,16 +33,20 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import org.apache.hadoop.conf.Configuration; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.File; +import java.nio.file.Files; import java.util.HashMap; import java.util.List; import java.util.Map; public class ExternalCatalogTest extends TestWithFeService { - private static Auth auth; - private static Env env; + private Env env; private CatalogMgr mgr; private ConnectContext rootCtx; @@ -51,7 +56,6 @@ protected void runBeforeAll() throws Exception { mgr = Env.getCurrentEnv().getCatalogMgr(); rootCtx = createDefaultCtx(); env = Env.getCurrentEnv(); - auth = env.getAuth(); // 1. create test catalog CreateCatalogStmt testCatalog = (CreateCatalogStmt) parseAndAnalyzeStmt( "create catalog test1 properties(\n" @@ -244,4 +248,32 @@ public Map>> getMetadata() { return MOCKED_META; } } + + @Test + public void testSerialization() throws Exception { + MetaContext metaContext = new MetaContext(); + metaContext.setMetaVersion(FeMetaVersion.VERSION_CURRENT); + metaContext.setThreadLocalInfo(); + + // 1. Write objects to file + File file = new File("./external_catalog_persist_test.dat"); + file.createNewFile(); + DataOutputStream dos = new DataOutputStream(Files.newOutputStream(file.toPath())); + + TestExternalCatalog ctl = (TestExternalCatalog) mgr.getCatalog("test1"); + ctl.write(dos); + dos.flush(); + dos.close(); + + // 2. Read objects from file + DataInputStream dis = new DataInputStream(Files.newInputStream(file.toPath())); + + TestExternalCatalog ctl2 = (TestExternalCatalog) ExternalCatalog.read(dis); + Configuration conf = ctl2.getConfiguration(); + Assertions.assertNotNull(conf); + + // 3. 
delete files + dis.close(); + file.delete(); + } } From 9296ce31020858c773e23d18398c8921ec1df94b Mon Sep 17 00:00:00 2001 From: Petrichor Date: Mon, 23 Dec 2024 10:12:59 +0800 Subject: [PATCH 40/82] [feat](nereids)implement useDatabase command in nereids (#45600) Issue Number: close https://github.com/apache/doris/issues/42523 --- .../org/apache/doris/nereids/DorisParser.g4 | 4 +- .../nereids/parser/LogicalPlanBuilder.java | 20 ++- .../doris/nereids/trees/plans/PlanType.java | 3 +- .../trees/plans/commands/use/UseCommand.java | 115 ++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 + .../nereids/parser/NereidsParserTest.java | 8 +- .../ddl/use/use_command_nereids.out | 13 ++ .../ddl/use/use_command_nereids.groovy | 79 ++++++++++++ 8 files changed, 236 insertions(+), 11 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java create mode 100644 regression-test/data/nereids_p0/ddl/use/use_command_nereids.out create mode 100644 regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 97876c231fec69..368847bac5f270 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -870,11 +870,11 @@ supportedUnsetStatement supportedUseStatement : SWITCH catalog=identifier #switchCatalog + | USE (catalog=identifier DOT)? database=identifier #useDatabase ; unsupportedUseStatement - : USE (catalog=identifier DOT)? database=identifier #useDatabase - | USE ((catalog=identifier DOT)? database=identifier)? ATSIGN cluster=identifier #useCloudCluster + : USE ((catalog=identifier DOT)? database=identifier)? 
ATSIGN cluster=identifier #useCloudCluster ; unsupportedDmlStatement diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index bb344e1b376deb..7bc328e238d99d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -324,6 +324,7 @@ import org.apache.doris.nereids.DorisParser.UpdateAssignmentContext; import org.apache.doris.nereids.DorisParser.UpdateAssignmentSeqContext; import org.apache.doris.nereids.DorisParser.UpdateContext; +import org.apache.doris.nereids.DorisParser.UseDatabaseContext; import org.apache.doris.nereids.DorisParser.UserIdentifyContext; import org.apache.doris.nereids.DorisParser.UserVariableContext; import org.apache.doris.nereids.DorisParser.WhereClauseContext; @@ -683,6 +684,7 @@ import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshDatabaseCommand; import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshTableCommand; import org.apache.doris.nereids.trees.plans.commands.use.SwitchCommand; +import org.apache.doris.nereids.trees.plans.commands.use.UseCommand; import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate; import org.apache.doris.nereids.trees.plans.logical.LogicalCTE; import org.apache.doris.nereids.trees.plans.logical.LogicalExcept; @@ -5178,12 +5180,20 @@ public LogicalPlan visitShowQueryProfile(ShowQueryProfileContext ctx) { } @Override - public Object visitSwitchCatalog(SwitchCatalogContext ctx) { - String catalogName = ctx.catalog.getText(); - if (catalogName != null) { - return new SwitchCommand(catalogName); + public LogicalPlan visitSwitchCatalog(SwitchCatalogContext ctx) { + if (ctx.catalog != null) { + return new SwitchCommand(ctx.catalog.getText()); } - throw new AnalysisException("catalog name can not be null"); + throw new 
ParseException("catalog name can not be null"); + } + + @Override + public LogicalPlan visitUseDatabase(UseDatabaseContext ctx) { + if (ctx.database == null) { + throw new ParseException("database name can not be null"); + } + return ctx.catalog != null ? new UseCommand(ctx.catalog.getText(), ctx.database.getText()) + : new UseCommand(ctx.database.getText()); } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index dfc129f10b0fd6..407610fbe08add 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -266,5 +266,6 @@ public enum PlanType { CREATE_ROUTINE_LOAD_COMMAND, SHOW_TABLE_CREATION_COMMAND, SHOW_QUERY_PROFILE_COMMAND, - SWITCH_COMMAND + SWITCH_COMMAND, + USE_COMMAND } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java new file mode 100644 index 00000000000000..9223e7d5ad66ed --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/use/UseCommand.java @@ -0,0 +1,115 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands.use; + +import org.apache.doris.analysis.StmtType; +import org.apache.doris.catalog.Env; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.common.DdlException; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.commands.Command; +import org.apache.doris.nereids.trees.plans.commands.NoForward; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.StmtExecutor; + +import com.google.common.base.Strings; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * Representation of a use db statement. 
+ */ +public class UseCommand extends Command implements NoForward { + private static final Logger LOG = LogManager.getLogger(UseCommand.class); + private String catalogName; + private String databaseName; + + public UseCommand(String databaseName) { + super(PlanType.USE_COMMAND); + this.databaseName = databaseName; + } + + public UseCommand(String catalogName, String databaseName) { + super(PlanType.USE_COMMAND); + this.catalogName = catalogName; + this.databaseName = databaseName; + } + + @Override + public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { + validate(ctx); + handleUseStmt(ctx); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitUseCommand(this, context); + } + + @Override + public StmtType stmtType() { + return StmtType.USE; + } + + private void validate(ConnectContext context) throws AnalysisException { + if (Strings.isNullOrEmpty(databaseName)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR); + } + String currentCatalogName = catalogName == null ? ConnectContext.get().getDefaultCatalog() : catalogName; + + if (!Env.getCurrentEnv().getAccessManager() + .checkDbPriv(ConnectContext.get(), currentCatalogName, databaseName, PrivPredicate.SHOW)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, context.getQualifiedUser(), + databaseName); + } + } + + /** + * Process use statement. + */ + private void handleUseStmt(ConnectContext context) { + try { + if (catalogName != null) { + context.getEnv().changeCatalog(context, catalogName); + } + context.getEnv().changeDb(context, databaseName); + } catch (DdlException e) { + LOG.warn("The handling of the use command failed.", e); + context.getState().setError(e.getMysqlErrorCode(), e.getMessage()); + return; + } + context.getState().setOk(); + } + + /** + * Generate sql string. 
+ */ + public String toSql() { + StringBuilder sb = new StringBuilder(); + sb.append("USE "); + if (catalogName != null) { + sb.append("`").append(catalogName).append("`."); + } + sb.append("`").append(databaseName).append("`"); + return sb.toString(); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index d3749e94d57d0f..122e513a08cb57 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -151,6 +151,7 @@ import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshDatabaseCommand; import org.apache.doris.nereids.trees.plans.commands.refresh.RefreshTableCommand; import org.apache.doris.nereids.trees.plans.commands.use.SwitchCommand; +import org.apache.doris.nereids.trees.plans.commands.use.UseCommand; /** CommandVisitor. 
*/ public interface CommandVisitor { @@ -697,4 +698,8 @@ default R visitShowQueryProfileCommand(ShowQueryProfileCommand showQueryProfileC default R visitSwitchCommand(SwitchCommand switchCommand, C context) { return visitCommand(switchCommand, context); } + + default R visitUseCommand(UseCommand useCommand, C context) { + return visitCommand(useCommand, context); + } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java index 9a46b810586eec..3ce7e64560ce1b 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/parser/NereidsParserTest.java @@ -448,7 +448,7 @@ public void testParseStmtType() { sql = "use a"; plan = nereidsParser.parseSingle(sql); - Assertions.assertEquals(plan.stmtType(), StmtType.OTHER); + Assertions.assertEquals(plan.stmtType(), StmtType.USE); sql = "CREATE TABLE tbl (`id` INT NOT NULL) DISTRIBUTED BY HASH(`id`) BUCKETS 1"; plan = nereidsParser.parseSingle(sql); @@ -463,10 +463,12 @@ public void testParseStmtType() { public void testParseUse() { NereidsParser nereidsParser = new NereidsParser(); String sql = "use db"; - nereidsParser.parseSingle(sql); + LogicalPlan logicalPlan = nereidsParser.parseSingle(sql); + Assertions.assertEquals(logicalPlan.stmtType(), StmtType.USE); sql = "use catalog.db"; - nereidsParser.parseSingle(sql); + LogicalPlan logicalPlan1 = nereidsParser.parseSingle(sql); + Assertions.assertEquals(logicalPlan1.stmtType(), StmtType.USE); } @Test diff --git a/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out b/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out new file mode 100644 index 00000000000000..17a7eaf6d7e12d --- /dev/null +++ b/regression-test/data/nereids_p0/ddl/use/use_command_nereids.out @@ -0,0 +1,13 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !show_tables_db1 -- +tb1 + +-- !show_tables_db2 -- +tb2 + +-- !show_tables_db1 -- +tb1 + +-- !show_tables_db2 -- +tb2 + diff --git a/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy b/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy new file mode 100644 index 00000000000000..70e0f3403e5855 --- /dev/null +++ b/regression-test/suites/nereids_p0/ddl/use/use_command_nereids.groovy @@ -0,0 +1,79 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("use_command_nereids") { + String db1 = "test_use_command_db1" + String db2 = "test_use_command_db2" + String tbl1 = "tb1" + String tbl2 = "tb2" + + sql """drop database if exists ${db1};""" + sql """drop database if exists ${db2};""" + // create database + sql """create database ${db1};""" + sql """create database ${db2};""" + //cloud-mode + if (isCloudMode()) { + return + } + // use command + checkNereidsExecute("use ${db1};") + + """drop table if exists ${tbl1};""" + sql """ create table ${db1}.${tbl1} + ( + c1 bigint, + c2 bigint + ) + ENGINE=OLAP + DUPLICATE KEY(c1, c2) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(c1) BUCKETS 1 + PROPERTIES ( + "replication_num" = "1" + ); + """ + qt_show_tables_db1 """show tables;""" + + checkNereidsExecute("use ${db2};") + """drop table if exists ${tbl2};""" + sql """ create table ${db2}.${tbl2} + ( + c1 bigint, + c2 bigint + ) + ENGINE=OLAP + DUPLICATE KEY(c1, c2) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(c1) BUCKETS 1 + PROPERTIES ( + "replication_num" = "1" + ); + """ + + qt_show_tables_db2 """show tables;""" + + checkNereidsExecute("use internal.${db1};") + qt_show_tables_db1 """show tables;""" + checkNereidsExecute("use internal.${db2};") + qt_show_tables_db2 """show tables;""" + + sql """drop table if exists ${db1}.${tbl1};""" + sql """drop table if exists ${db2}.${tbl2};""" + sql """drop database if exists ${db1};""" + sql """drop database if exists ${db2};""" +} \ No newline at end of file From 49d397b8a087a2f72d6d78ab0f5a73f0b1bcbdab Mon Sep 17 00:00:00 2001 From: "Mingyu Chen (Rayner)" Date: Mon, 23 Dec 2024 10:19:37 +0800 Subject: [PATCH 41/82] [fix](hudi) remove session variable field in HudiScanNode (#45762) ### What problem does this PR solve? Related PR: #45355 Problem Summary: The `sessionVariable` field is already in parent class `FileQueryScanNode`, remove it from `HudiScanNode`. 
--- .../doris/datasource/hudi/source/HudiScanNode.java | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java index 486fdea74a00bb..b1eb47095f33c4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java @@ -91,8 +91,6 @@ public class HudiScanNode extends HiveScanNode { private final AtomicLong noLogsSplitNum = new AtomicLong(0); - private final boolean useHiveSyncPartition; - private HoodieTableMetaClient hudiClient; private String basePath; private String inputFormat; @@ -102,7 +100,6 @@ public class HudiScanNode extends HiveScanNode { private boolean partitionInit = false; private HoodieTimeline timeline; - private Option snapshotTimestamp; private String queryInstant; private final AtomicReference batchException = new AtomicReference<>(null); @@ -113,7 +110,6 @@ public class HudiScanNode extends HiveScanNode { private boolean incrementalRead = false; private TableScanParams scanParams; private IncrementalRelation incrementalRelation; - private SessionVariable sessionVariable; /** * External file scan node for Query Hudi table @@ -125,8 +121,8 @@ public class HudiScanNode extends HiveScanNode { */ public HudiScanNode(PlanNodeId id, TupleDescriptor desc, boolean needCheckColumnPriv, Optional scanParams, Optional incrementalRelation, - SessionVariable sessionVariable) { - super(id, desc, "HUDI_SCAN_NODE", StatisticalType.HUDI_SCAN_NODE, needCheckColumnPriv, sessionVariable); + SessionVariable sv) { + super(id, desc, "HUDI_SCAN_NODE", StatisticalType.HUDI_SCAN_NODE, needCheckColumnPriv, sv); isCowTable = hmsTable.isHoodieCowTable(); if (LOG.isDebugEnabled()) { if (isCowTable) { @@ -136,11 +132,9 @@ public HudiScanNode(PlanNodeId id, TupleDescriptor 
desc, boolean needCheckColumn hmsTable.getFullQualifiers()); } } - useHiveSyncPartition = hmsTable.useHiveSyncPartition(); this.scanParams = scanParams.orElse(null); this.incrementalRelation = incrementalRelation.orElse(null); this.incrementalRead = (this.scanParams != null && this.scanParams.incrementalRead()); - this.sessionVariable = sessionVariable; } @Override @@ -215,7 +209,6 @@ protected void doInitialize() throws UserException { throw new UserException("Hudi does not support `FOR VERSION AS OF`, please use `FOR TIME AS OF`"); } queryInstant = tableSnapshot.getTime().replaceAll("[-: ]", ""); - snapshotTimestamp = Option.of(queryInstant); } else { Option snapshotInstant = timeline.lastInstant(); if (!snapshotInstant.isPresent()) { @@ -224,7 +217,6 @@ protected void doInitialize() throws UserException { return; } queryInstant = snapshotInstant.get().getTimestamp(); - snapshotTimestamp = Option.empty(); } } From 0fe8d7b663e671b1e6ae196457cec8102504f533 Mon Sep 17 00:00:00 2001 From: zzzxl Date: Mon, 23 Dec 2024 10:24:01 +0800 Subject: [PATCH 42/82] [fix](inverted index) Fix the issue with incorrect seek results in DICT_COMPRESS (#45738) Related PR: https://github.com/apache/doris/pull/44414 Problem Summary: In inverted index version 3 mode, using dictionary compression may lead to incorrect results after a seek operation. 
--- be/src/clucene | 2 +- .../test_inverted_index_v3.out | 12 +++++++++ .../test_inverted_index_v3.groovy | 26 +++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) diff --git a/be/src/clucene b/be/src/clucene index a506dbb6c523aa..2204eaec46a68e 160000 --- a/be/src/clucene +++ b/be/src/clucene @@ -1 +1 @@ -Subproject commit a506dbb6c523aa65044eb1c527a066d236172543 +Subproject commit 2204eaec46a68e5e9a1876b7021f24839ecb2cf0 diff --git a/regression-test/data/inverted_index_p0/test_inverted_index_v3.out b/regression-test/data/inverted_index_p0/test_inverted_index_v3.out index 9dc20f3e0e0a85..53f4eb7ae0a667 100644 --- a/regression-test/data/inverted_index_p0/test_inverted_index_v3.out +++ b/regression-test/data/inverted_index_p0/test_inverted_index_v3.out @@ -23,3 +23,15 @@ -- !sql -- 105 +-- !sql -- +238 + +-- !sql -- +104 + +-- !sql -- +104 + +-- !sql -- +105 + diff --git a/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy b/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy index ea7dd0b595f504..82389d84e3cd67 100644 --- a/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy +++ b/regression-test/suites/inverted_index_p0/test_inverted_index_v3.groovy @@ -19,9 +19,11 @@ suite("test_inverted_index_v3", "p0"){ def indexTbName1 = "test_inverted_index_v3_1" def indexTbName2 = "test_inverted_index_v3_2" + def indexTbName3 = "test_inverted_index_v3_3" sql "DROP TABLE IF EXISTS ${indexTbName1}" sql "DROP TABLE IF EXISTS ${indexTbName2}" + sql "DROP TABLE IF EXISTS ${indexTbName3}" sql """ CREATE TABLE ${indexTbName1} ( @@ -59,6 +61,24 @@ suite("test_inverted_index_v3", "p0"){ ); """ + sql """ + CREATE TABLE ${indexTbName3} ( + `@timestamp` int(11) NULL COMMENT "", + `clientip` varchar(20) NULL COMMENT "", + `request` text NULL COMMENT "", + `status` int(11) NULL COMMENT "", + `size` int(11) NULL COMMENT "", + INDEX request_idx (`request`) USING INVERTED PROPERTIES("parser" = "english", 
"support_phrase" = "true", "dict_compression" = "true") COMMENT '' + ) ENGINE=OLAP + DUPLICATE KEY(`@timestamp`) + COMMENT "OLAP" + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "inverted_index_storage_format" = "V3" + ); + """ + def load_httplogs_data = {table_name, label, read_flag, format_flag, file_name, ignore_failure=false, expected_succ_rows = -1, load_to_single_tablet = 'true' -> @@ -99,6 +119,7 @@ suite("test_inverted_index_v3", "p0"){ try { load_httplogs_data.call(indexTbName1, indexTbName1, 'true', 'json', 'documents-1000.json') load_httplogs_data.call(indexTbName2, indexTbName2, 'true', 'json', 'documents-1000.json') + load_httplogs_data.call(indexTbName3, indexTbName3, 'true', 'json', 'documents-1000.json') sql "sync" @@ -112,6 +133,11 @@ suite("test_inverted_index_v3", "p0"){ qt_sql """ select count() from ${indexTbName2} where request match_phrase 'hm bg'; """ qt_sql """ select count() from ${indexTbName2} where request match_phrase_prefix 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_any 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_all 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_phrase 'hm bg'; """ + qt_sql """ select count() from ${indexTbName3} where request match_phrase_prefix 'hm bg'; """ + } finally { } } \ No newline at end of file From 0527a2e5abef95418afd77920aee66f8bd6f8318 Mon Sep 17 00:00:00 2001 From: deardeng Date: Mon, 23 Dec 2024 10:26:31 +0800 Subject: [PATCH 43/82] [fix](tabletScheduler) Fix addTablet dead lock in tabletScheduler (#45298) The conditions that need to be met to trigger the bug, with the second condition being somewhat difficult to trigger, are as follows: 1. The number of tablets that need to be fixed exceeds 2000 (in the pending queue); 2. 
The scheduling of the lowest priority in the pending queue has previously experienced a clone failure, with fewer than 3 failures, and has been put back into the pending queue. Additionally, a new scheduling request that happens to belong to the same table as the previous one has a higher priority than the previous scheduling. The fix is to write the lock trylock in finalize TabletCtx. If the lock cannot be obtained, the current scheduling will fail and the next one will be rescheduled Fix ``` "colocate group clone checker" #7557 daemon prio=5 os_prio=0 cpu=686.24ms elapsed=6719.45s tid=0x00007f3e6c039ab0 nid=0x17b08 waiting on condition [0x00007f3ec77fe000] (1 similar threads) java.lang.Thread.State: WAITING (parking) at jdk.internal.misc.Unsafe.park(java.base@17.0.2/Native Method) - parking to wait for <0x000010014d223908> (a java.util.concurrent.locks.ReentrantReadWriteLock$FairSync) at java.util.concurrent.locks.LockSupport.park(java.base@17.0.2/LockSupport.java:211) at java.util.concurrent.locks.AbstractQueuedSynchronizer.acquire(java.base@17.0.2/AbstractQueuedSynchronizer.java:715) at java.util.concurrent.locks.AbstractQueuedSynchronizer.acquire(java.base@17.0.2/AbstractQueuedSynchronizer.java:938) at java.util.concurrent.locks.ReentrantReadWriteLock$WriteLock.lock(java.base@17.0.2/ReentrantReadWriteLock.java:959) at org.apache.doris.common.lock.MonitoredReentrantReadWriteLock$WriteLock.lock(MonitoredReentrantReadWriteLock.java:98) at org.apache.doris.catalog.Table.writeLockIfExist(Table.java:211) at org.apache.doris.clone.TabletSchedCtx.releaseResource(TabletSchedCtx.java:940) at org.apache.doris.clone.TabletSchedCtx.releaseResource(TabletSchedCtx.java:898) at org.apache.doris.clone.TabletScheduler.releaseTabletCtx(TabletScheduler.java:1743) at org.apache.doris.clone.TabletScheduler.finalizeTabletCtx(TabletScheduler.java:1625) at org.apache.doris.clone.TabletScheduler.addTablet(TabletScheduler.java:287) - locked <0x0000100009429110> (a 
org.apache.doris.clone.TabletScheduler) at org.apache.doris.clone.ColocateTableCheckerAndBalancer.matchGroups(ColocateTableCheckerAndBalancer.java:563) at org.apache.doris.clone.ColocateTableCheckerAndBalancer.runAfterCatalogReady(ColocateTableCheckerAndBalancer.java:340) at org.apache.doris.common.util.MasterDaemon.runOneCycle(MasterDaemon.java:58) at org.apache.doris.common.util.Daemon.run(Daemon.java:119) ``` --- be/src/olap/task/engine_clone_task.cpp | 10 ++++ .../apache/doris/clone/TabletSchedCtx.java | 31 ++--------- .../apache/doris/clone/TabletScheduler.java | 9 ++-- .../apache/doris/clone/TabletHealthTest.java | 52 +++++++++++++++++++ .../doris/utframe/MockedBackendFactory.java | 5 ++ 5 files changed, 74 insertions(+), 33 deletions(-) diff --git a/be/src/olap/task/engine_clone_task.cpp b/be/src/olap/task/engine_clone_task.cpp index fa8d9b8248e3f4..9af3e078d3aefa 100644 --- a/be/src/olap/task/engine_clone_task.cpp +++ b/be/src/olap/task/engine_clone_task.cpp @@ -171,6 +171,16 @@ Status EngineCloneTask::_do_clone() { auto duration = std::chrono::milliseconds(dp->param("duration", 10 * 1000)); std::this_thread::sleep_for(duration); }); + + DBUG_EXECUTE_IF("EngineCloneTask.failed_clone", { + LOG_WARNING("EngineCloneTask.failed_clone") + .tag("tablet_id", _clone_req.tablet_id) + .tag("replica_id", _clone_req.replica_id) + .tag("version", _clone_req.version); + return Status::InternalError( + "in debug point, EngineCloneTask.failed_clone tablet={}, replica={}, version={}", + _clone_req.tablet_id, _clone_req.replica_id, _clone_req.version); + }); Status status = Status::OK(); string src_file_path; TBackend src_host; diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java index a6ba294e80934c..b8a098cc891dee 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java +++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletSchedCtx.java @@ -62,6 
+62,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; /* * TabletSchedCtx contains all information which is created during tablet scheduler processing. @@ -69,28 +70,6 @@ public class TabletSchedCtx implements Comparable { private static final Logger LOG = LogManager.getLogger(TabletSchedCtx.class); - /* - * SCHED_FAILED_COUNTER_THRESHOLD: - * threshold of times a tablet failed to be scheduled - * - * MIN_ADJUST_PRIORITY_INTERVAL_MS: - * min interval time of adjusting a tablet's priority - * - * MAX_NOT_BEING_SCHEDULED_INTERVAL_MS: - * max gap time of a tablet NOT being scheduled. - * - * These 3 params is for adjusting priority. - * If a tablet being scheduled failed for more than SCHED_FAILED_COUNTER_THRESHOLD times, its priority - * will be downgraded. And the interval between adjustment is larger than MIN_ADJUST_PRIORITY_INTERVAL_MS, - * to avoid being downgraded too soon. - * And if a tablet is not being scheduled longer than MAX_NOT_BEING_SCHEDULED_INTERVAL_MS, its priority - * will be upgraded, to avoid starvation. - * - */ - private static final int SCHED_FAILED_COUNTER_THRESHOLD = 5; - private static final long MIN_ADJUST_PRIORITY_INTERVAL_MS = 5 * 60 * 1000L; // 5 min - private static final long MAX_NOT_BEING_SCHEDULED_INTERVAL_MS = 30 * 60 * 1000L; // 30 min - /* * A clone task timeout is between Config.min_clone_task_timeout_sec and Config.max_clone_task_timeout_sec, * estimated by tablet size / MIN_CLONE_SPEED_MB_PER_SECOND. 
@@ -450,10 +429,6 @@ public void setSchedFailedCode(SubCode code) { schedFailedCode = code; } - public CloneTask getCloneTask() { - return cloneTask; - } - public long getCopySize() { return copySize; } @@ -932,12 +907,14 @@ public void releaseResource(TabletScheduler tabletScheduler, boolean reserveTabl } if (cloneTask != null) { AgentTaskQueue.removeTask(cloneTask.getBackendId(), TTaskType.CLONE, cloneTask.getSignature()); + cloneTask = null; // clear all CLONE replicas Database db = Env.getCurrentInternalCatalog().getDbNullable(dbId); if (db != null) { Table table = db.getTableNullable(tblId); - if (table != null && table.writeLockIfExist()) { + // try get table write lock, if failed TabletScheduler will try next time + if (table != null && table.tryWriteLockIfExist(Table.TRY_LOCK_TIMEOUT_MS, TimeUnit.MILLISECONDS)) { try { List cloneReplicas = Lists.newArrayList(); tablet.getReplicas().stream().filter(r -> r.getState() == ReplicaState.CLONE).forEach(r -> { diff --git a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java index 1545236aa59cd0..dc07ddb0be4d30 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java +++ b/fe/fe-core/src/main/java/org/apache/doris/clone/TabletScheduler.java @@ -105,9 +105,6 @@ public class TabletScheduler extends MasterDaemon { private static final Logger LOG = LogManager.getLogger(TabletScheduler.class); - // handle at most BATCH_NUM tablets in one loop - private static final int MIN_BATCH_NUM = 50; - // the minimum interval of updating cluster statistics and priority of tablet info private static final long STAT_UPDATE_INTERVAL_MS = 20 * 1000; // 20s @@ -151,7 +148,7 @@ public enum AddResult { ADDED, // success to add ALREADY_IN, // already added, skip LIMIT_EXCEED, // number of pending tablets exceed the limit - REPLACE_ADDED, // succ to add, and envit a lowest task + REPLACE_ADDED, // succ to add, and evict a lowest 
task DISABLED // scheduler has been disabled. } @@ -292,7 +289,7 @@ public synchronized AddResult addTablet(TabletSchedCtx tablet, boolean force) { addResult = AddResult.REPLACE_ADDED; pendingTablets.pollLast(); finalizeTabletCtx(lowestPriorityTablet, TabletSchedCtx.State.CANCELLED, Status.UNRECOVERABLE, - "envit lower priority sched tablet because pending queue is full"); + "evict lower priority sched tablet because pending queue is full"); } if (!contains || tablet.getType() == TabletSchedCtx.Type.REPAIR) { @@ -1868,9 +1865,9 @@ public boolean finishCloneTask(CloneTask cloneTask, TFinishTaskRequest request) tabletCtx.increaseFailedRunningCounter(); if (!tabletCtx.isExceedFailedRunningLimit()) { stat.counterCloneTaskFailed.incrementAndGet(); + tabletCtx.setState(TabletSchedCtx.State.PENDING); tabletCtx.releaseResource(this); tabletCtx.resetFailedSchedCounter(); - tabletCtx.setState(TabletSchedCtx.State.PENDING); addBackToPendingTablets(tabletCtx); return false; } else { diff --git a/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java b/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java index b22925e5d89270..320bff45229fba 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/clone/TabletHealthTest.java @@ -40,12 +40,14 @@ import com.google.common.base.Joiner; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import com.google.common.collect.MinMaxPriorityQueue; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class TabletHealthTest extends TestWithFeService { @@ -78,6 +80,8 @@ protected void runBeforeAll() throws Exception { @Override protected void runBeforeEach() throws Exception { + // set back to default value + Config.max_scheduling_tablets = 
2000; for (Table table : db.getTables()) { dropTable(table.getName(), true); } @@ -358,4 +362,52 @@ public void testColocateTabletHealth() throws Exception { dropTable(table.getName(), true); } + + @Test + public void testAddTabletNoDeadLock() throws Exception { + Config.max_scheduling_tablets = 1; + createTable("CREATE TABLE tbl3 (k INT) DISTRIBUTED BY HASH(k) BUCKETS 2" + + " PROPERTIES ('replication_num' = '3')"); + DebugPointUtil.addDebugPoint("MockedBackendFactory.handleCloneTablet.failed"); + OlapTable table = (OlapTable) db.getTableOrMetaException("tbl3"); + Partition partition = table.getPartitions().iterator().next(); + List tablets = partition.getMaterializedIndices(IndexExtState.ALL).iterator().next().getTablets(); + Assertions.assertEquals(2, tablets.size()); + + partition.updateVisibleVersion(10L); + tablets.forEach(tablet -> tablet.getReplicas().forEach(replica -> replica.updateVersion(10))); + + Tablet tabletA = tablets.get(0); + Tablet tabletB = tablets.get(1); + TabletScheduler scheduler = Env.getCurrentEnv().getTabletScheduler(); + tabletA.getReplicas().get(0).adminUpdateVersionInfo(8L, null, null, 0L); + checkTabletStatus(tabletA, TabletStatus.VERSION_INCOMPLETE, table, partition); + Env.getCurrentEnv().getTabletChecker().runAfterCatalogReady(); + Env.getCurrentEnv().getTabletScheduler().runAfterCatalogReady(); + Thread.sleep(1000); + MinMaxPriorityQueue queue = scheduler.getPendingTabletQueue(); + TabletSchedCtx tabletACtx = queue.peekFirst(); + Assertions.assertNotNull(tabletACtx); + tabletACtx.setLastVisitedTime(System.currentTimeMillis() + 3600 * 1000L); + tabletB.getReplicas().get(0).adminUpdateVersionInfo(8L, null, null, 0L); + checkTabletStatus(tabletB, TabletStatus.VERSION_INCOMPLETE, table, partition); + Thread thread = new Thread(() -> { + try { + Env.getCurrentEnv().getTabletChecker().runAfterCatalogReady(); + Env.getCurrentEnv().getTabletScheduler().runAfterCatalogReady(); + } catch (Exception e) { + e.printStackTrace(); + } + }); + 
thread.start(); + Thread.sleep(1000); + Assertions.assertTrue(table.tryWriteLock(2, TimeUnit.SECONDS)); + table.writeUnlock(); + DebugPointUtil.clearDebugPoints(); + doRepair(); + Thread.sleep(1000); + doRepair(); + checkTabletIsHealth(tabletA, table, partition); + checkTabletIsHealth(tabletB, table, partition); + } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java b/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java index 9e8ff913ada8ac..1a9a175366e528 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java +++ b/fe/fe-core/src/test/java/org/apache/doris/utframe/MockedBackendFactory.java @@ -95,6 +95,7 @@ import org.apache.thrift.TException; import java.io.IOException; +import java.util.Collections; import java.util.List; import java.util.Random; import java.util.concurrent.BlockingQueue; @@ -305,6 +306,10 @@ private void handleCloneTablet(TAgentTaskRequest request, TFinishTaskRequest fin tabletInfo.setPathHash(pathHash); tabletInfo.setUsed(true); tabletInfos.add(tabletInfo); + if (DebugPointUtil.isEnable("MockedBackendFactory.handleCloneTablet.failed")) { + finishTaskRequest.setTaskStatus(new TStatus(TStatusCode.CANCELLED)); + finishTaskRequest.getTaskStatus().setErrorMsgs(Collections.singletonList("debug point set")); + } finishTaskRequest.setFinishTabletInfos(tabletInfos); } From 4fc9f92defaae41667e1a2eb3b3a0f1f451e99fd Mon Sep 17 00:00:00 2001 From: lihangyu Date: Mon, 23 Dec 2024 10:38:14 +0800 Subject: [PATCH 44/82] [Improve](Variant) pick random backend as coordinator (#45754) pick random rpc coordinator to do fetch_remote_tablet_schema service --- .../common/util/FetchRemoteTabletSchemaUtil.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java index 
4a0b9d1ff5950d..00147207c143db 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/FetchRemoteTabletSchemaUtil.java @@ -98,16 +98,19 @@ public List fetch() { if (!backend.isAlive()) { continue; } - // need 2 be to provide a retry - if (coordinatorBackend.size() < 2) { - coordinatorBackend.add(backend); - } + coordinatorBackend.add(backend); PTabletsLocation.Builder locationBuilder = PTabletsLocation.newBuilder() .setHost(backend.getHost()) .setBrpcPort(backend.getBrpcPort()); PTabletsLocation location = locationBuilder.addAllTabletId(tabletIds).build(); locations.add(location); } + // pick 2 random coordinator + Collections.shuffle(coordinatorBackend); + if (!coordinatorBackend.isEmpty()) { + coordinatorBackend = coordinatorBackend.subList(0, Math.min(2, coordinatorBackend.size())); + LOG.debug("pick coordinator backend {}", coordinatorBackend.get(0)); + } PFetchRemoteSchemaRequest.Builder requestBuilder = PFetchRemoteSchemaRequest.newBuilder() .addAllTabletLocation(locations) .setIsCoordinator(true); From f01f759a0387aa2215e532e9f455d17495089233 Mon Sep 17 00:00:00 2001 From: Socrates Date: Mon, 23 Dec 2024 10:59:54 +0800 Subject: [PATCH 45/82] [Fix](ORC) Not push down fixed char type in orc reader (#45484) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What problem does this PR solve? Problem Summary: In Hive, the ORC file format supports fixed-length CHAR types (CHAR(n)) by padding strings with spaces to ensure the fixed length. When data is written into ORC tables, the actual stored value includes additional trailing spaces to meet the defined length. These padded spaces are also considered during the computation of statistics. However, in Doris, fixed-length CHAR types (CHAR(n)) and variable-length VARCHAR types are internally represented as the same type. 
Doris does not pad CHAR values with spaces and treats them as regular strings. As a result, when Doris reads ORC files generated by Hive and parses the statistics, the differences in the handling of CHAR types between the two systems can lead to inconsistencies or incorrect statistics. ```sql create table fixed_char_table ( i int, c char(2) ) stored as orc; insert into fixed_char_table values(1,'a'),(2,'b '), (3,'cd'); select * from fixed_char_table where c = 'a'; ``` before ```text empty ``` after ```text 1 a ``` If a Hive table undergoes a schema change, such as a column’s type being modified from INT to STRING, predicate pushdown should be disabled in such cases. Performing predicate pushdown under these circumstances may lead to incorrect filtering, as the type mismatch can cause errors or unexpected behavior during query execution. ```sql create table type_changed_table ( id int, name string ) stored as orc; insert into type_changed_table values (1, 'Alice'), (2, 'Bob'), (3, 'Charlie'); ALTER TABLE type_changed_table CHANGE COLUMN id id STRING; select * from type_changed_table where id = '1'; select ``` before ```text empty ``` after ```text 1 a ``` ### Release note [fix](orc) Not push down fixed char type in orc reader #45484 --- be/src/vec/exec/format/orc/vorc_reader.cpp | 24 ++++++--- be/src/vec/exec/format/orc/vorc_reader.h | 4 +- be/src/vec/exec/scan/vfile_scanner.cpp | 10 +--- .../orc_predicate/orc_predicate_table.hql | 16 ++++++ .../data/multi_catalog/orc_predicate/run.sh | 9 ++++ .../hive/test_hive_orc_predicate.out | 29 +++++++++++ .../hive/test_hive_orc_predicate.groovy | 50 +++++++++++++++++++ 7 files changed, 122 insertions(+), 20 deletions(-) create mode 100644 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql create mode 100755 docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh create mode 100644 
regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out create mode 100644 regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy diff --git a/be/src/vec/exec/format/orc/vorc_reader.cpp b/be/src/vec/exec/format/orc/vorc_reader.cpp index a1ecb1ae0dcf8b..4d41830668960c 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.cpp +++ b/be/src/vec/exec/format/orc/vorc_reader.cpp @@ -143,7 +143,7 @@ void ORCFileInputStream::read(void* buf, uint64_t length, uint64_t offset) { OrcReader::OrcReader(RuntimeProfile* profile, RuntimeState* state, const TFileScanRangeParams& params, const TFileRangeDesc& range, size_t batch_size, const std::string& ctz, io::IOContext* io_ctx, - bool enable_lazy_mat, std::vector* unsupported_pushdown_types) + bool enable_lazy_mat) : _profile(profile), _state(state), _scan_params(params), @@ -156,8 +156,7 @@ OrcReader::OrcReader(RuntimeProfile* profile, RuntimeState* state, _enable_lazy_mat(enable_lazy_mat), _enable_filter_by_min_max( state == nullptr ? 
true : state->query_options().enable_orc_filter_by_min_max), - _dict_cols_has_converted(false), - _unsupported_pushdown_types(unsupported_pushdown_types) { + _dict_cols_has_converted(false) { TimezoneUtils::find_cctz_time_zone(ctz, _time_zone); VecDateTimeValue t; t.from_unixtime(0, ctz); @@ -460,7 +459,8 @@ static std::unordered_map TYPEKIND_TO_PRE {orc::TypeKind::DOUBLE, orc::PredicateDataType::FLOAT}, {orc::TypeKind::STRING, orc::PredicateDataType::STRING}, {orc::TypeKind::BINARY, orc::PredicateDataType::STRING}, - {orc::TypeKind::CHAR, orc::PredicateDataType::STRING}, + // should not pust down CHAR type, because CHAR type is fixed length and will be padded + // {orc::TypeKind::CHAR, orc::PredicateDataType::STRING}, {orc::TypeKind::VARCHAR, orc::PredicateDataType::STRING}, {orc::TypeKind::DATE, orc::PredicateDataType::DATE}, {orc::TypeKind::DECIMAL, orc::PredicateDataType::DECIMAL}, @@ -492,8 +492,9 @@ std::tuple convert_to_orc_literal(const orc::Type* type, [[fallthrough]]; case orc::TypeKind::BINARY: [[fallthrough]]; - case orc::TypeKind::CHAR: - [[fallthrough]]; + // should not pust down CHAR type, because CHAR type is fixed length and will be padded + // case orc::TypeKind::CHAR: + // [[fallthrough]]; case orc::TypeKind::VARCHAR: { return std::make_tuple(true, orc::Literal(literal_data.data, literal_data.size)); } @@ -593,7 +594,15 @@ std::tuple OrcReader::_make_orc_lite auto literal_data = literal->get_column_ptr()->get_data_at(0); auto* slot = _tuple_descriptor->slots()[slot_ref->column_id()]; auto slot_type = slot->type(); - switch (slot_type.type) { + auto primitive_type = slot_type.type; + auto src_type = OrcReader::convert_to_doris_type(orc_type).type; + // should not down predicate for string type change from other type + if (src_type != primitive_type && !is_string_type(src_type) && is_string_type(primitive_type)) { + LOG(WARNING) << "Unsupported Push Down Schema Changed Column " << primitive_type << " to " + << src_type; + return 
std::make_tuple(false, orc::Literal(false), orc::PredicateDataType::LONG); + } + switch (primitive_type) { #define M(NAME) \ case TYPE_##NAME: { \ auto [valid, orc_literal] = convert_to_orc_literal( \ @@ -606,7 +615,6 @@ std::tuple OrcReader::_make_orc_lite M(INT) \ M(BIGINT) \ M(LARGEINT) \ - M(CHAR) \ M(DATE) \ M(DATETIME) \ M(DATEV2) \ diff --git a/be/src/vec/exec/format/orc/vorc_reader.h b/be/src/vec/exec/format/orc/vorc_reader.h index 0dd19077bcf0af..6bbf3bead1efce 100644 --- a/be/src/vec/exec/format/orc/vorc_reader.h +++ b/be/src/vec/exec/format/orc/vorc_reader.h @@ -129,8 +129,7 @@ class OrcReader : public GenericReader { OrcReader(RuntimeProfile* profile, RuntimeState* state, const TFileScanRangeParams& params, const TFileRangeDesc& range, size_t batch_size, const std::string& ctz, - io::IOContext* io_ctx, bool enable_lazy_mat = true, - std::vector* unsupported_pushdown_types = nullptr); + io::IOContext* io_ctx, bool enable_lazy_mat = true); OrcReader(const TFileScanRangeParams& params, const TFileRangeDesc& range, const std::string& ctz, io::IOContext* io_ctx, bool enable_lazy_mat = true); @@ -639,7 +638,6 @@ class OrcReader : public GenericReader { std::unique_ptr _string_dict_filter; bool _dict_cols_has_converted = false; bool _has_complex_type = false; - std::vector* _unsupported_pushdown_types; // resolve schema change std::unordered_map> _converters; diff --git a/be/src/vec/exec/scan/vfile_scanner.cpp b/be/src/vec/exec/scan/vfile_scanner.cpp index 76639e4bed4a28..93a22d1a94bf52 100644 --- a/be/src/vec/exec/scan/vfile_scanner.cpp +++ b/be/src/vec/exec/scan/vfile_scanner.cpp @@ -879,17 +879,9 @@ Status VFileScanner::_get_next_reader() { break; } case TFileFormatType::FORMAT_ORC: { - std::vector* unsupported_pushdown_types = nullptr; - if (range.__isset.table_format_params && - range.table_format_params.table_format_type == "paimon") { - static std::vector paimon_unsupport_type = - std::vector {orc::TypeKind::CHAR}; - unsupported_pushdown_types = 
&paimon_unsupport_type; - } std::unique_ptr orc_reader = OrcReader::create_unique( _profile, _state, *_params, range, _state->query_options().batch_size, - _state->timezone(), _io_ctx.get(), _state->query_options().enable_orc_lazy_mat, - unsupported_pushdown_types); + _state->timezone(), _io_ctx.get(), _state->query_options().enable_orc_lazy_mat); orc_reader->set_push_down_agg_type(_get_push_down_agg_type()); if (push_down_predicates) { RETURN_IF_ERROR(_process_late_arrival_conjuncts()); diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql new file mode 100644 index 00000000000000..a946b25ff1af04 --- /dev/null +++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/orc_predicate_table.hql @@ -0,0 +1,16 @@ +CREATE DATABASE IF NOT EXISTS multi_catalog; +USE multi_catalog; + +create table fixed_char_table ( + i int, + c char(2) +) stored as orc; + +insert into fixed_char_table values(1,'a'),(2,'b '), (3,'cd'); + +create table type_changed_table ( + id int, + name string +) stored as orc; +insert into type_changed_table values (1, 'Alice'), (2, 'Bob'), (3, 'Charlie'); +ALTER TABLE type_changed_table CHANGE COLUMN id id STRING; diff --git a/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh new file mode 100755 index 00000000000000..f934ff3009c6f2 --- /dev/null +++ b/docker/thirdparties/docker-compose/hive/scripts/data/multi_catalog/orc_predicate/run.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -x + +CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" + +# create table +hive -f "${CUR_DIR}"/orc_predicate_table.hql + + diff --git a/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out 
b/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out new file mode 100644 index 00000000000000..f42bb629550c88 --- /dev/null +++ b/regression-test/data/external_table_p0/hive/test_hive_orc_predicate.out @@ -0,0 +1,29 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !predicate_fixed_char1 -- +1 a + +-- !predicate_fixed_char2 -- + +-- !predicate_changed_type1 -- +1 Alice + +-- !predicate_changed_type2 -- +2 Bob + +-- !predicate_changed_type3 -- +3 Charlie + +-- !predicate_fixed_char1 -- +1 a + +-- !predicate_fixed_char2 -- + +-- !predicate_changed_type1 -- +1 Alice + +-- !predicate_changed_type2 -- +2 Bob + +-- !predicate_changed_type3 -- +3 Charlie + diff --git a/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy b/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy new file mode 100644 index 00000000000000..2dd647aa2c1d8e --- /dev/null +++ b/regression-test/suites/external_table_p0/hive/test_hive_orc_predicate.groovy @@ -0,0 +1,50 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_hive_orc_predicate", "p0,external,hive,external_docker,external_docker_hive") { + + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("diable Hive test.") + return; + } + + for (String hivePrefix : ["hive2", "hive3"]) { + try { + String hms_port = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_predicate" + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + + sql """drop catalog if exists ${catalog_name}""" + sql """create catalog if not exists ${catalog_name} properties ( + "type"="hms", + 'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}' + );""" + sql """use `${catalog_name}`.`multi_catalog`""" + + qt_predicate_fixed_char1 """ select * from fixed_char_table where c = 'a';""" + qt_predicate_fixed_char2 """ select * from fixed_char_table where c = 'a ';""" + + qt_predicate_changed_type1 """ select * from type_changed_table where id = '1';""" + qt_predicate_changed_type2 """ select * from type_changed_table where id = '2';""" + qt_predicate_changed_type3 """ select * from type_changed_table where id = '3';""" + + sql """drop catalog if exists ${catalog_name}""" + } finally { + } + } +} From e7d2fedd1ce8abc276476d92eacfa882f318fcdc Mon Sep 17 00:00:00 2001 From: 924060929 Date: Mon, 23 Dec 2024 11:17:45 +0800 Subject: [PATCH 46/82] [opt](nereids) optimize rewrite of synchronize materialize view (#45748) optimize rewrite of synchronize materialize view 1. cache toSql 2. 
fast parse UnboundSlot in NereidsParser.parseExpression --- .../doris/common/profile/SummaryProfile.java | 4 +- .../doris/nereids/analyzer/MappingSlot.java | 2 +- .../doris/nereids/analyzer/UnboundAlias.java | 2 +- .../nereids/analyzer/UnboundFunction.java | 2 +- .../doris/nereids/analyzer/UnboundSlot.java | 14 ++++- .../doris/nereids/analyzer/UnboundStar.java | 2 +- .../doris/nereids/parser/NereidsParser.java | 53 +++++++++++++++++++ .../AbstractSelectMaterializedIndexRule.java | 5 +- .../SelectMaterializedIndexWithAggregate.java | 6 ++- .../expressions/AggregateExpression.java | 2 +- .../nereids/trees/expressions/Alias.java | 2 +- .../trees/expressions/ArrayItemReference.java | 2 +- .../trees/expressions/BinaryOperator.java | 2 +- .../nereids/trees/expressions/BoundStar.java | 2 +- .../nereids/trees/expressions/CaseWhen.java | 2 +- .../doris/nereids/trees/expressions/Cast.java | 2 +- .../trees/expressions/CompoundPredicate.java | 2 +- .../nereids/trees/expressions/Exists.java | 4 +- .../nereids/trees/expressions/Expression.java | 10 ++++ .../trees/expressions/InPredicate.java | 2 +- .../nereids/trees/expressions/InSubquery.java | 4 +- .../nereids/trees/expressions/IsNull.java | 2 +- .../nereids/trees/expressions/ListQuery.java | 4 +- .../nereids/trees/expressions/Match.java | 2 +- .../doris/nereids/trees/expressions/Not.java | 2 +- .../trees/expressions/OrderExpression.java | 2 +- .../trees/expressions/Placeholder.java | 2 +- .../nereids/trees/expressions/Properties.java | 2 +- .../trees/expressions/ScalarSubquery.java | 4 +- .../trees/expressions/SlotReference.java | 2 +- .../expressions/StringRegexPredicate.java | 2 +- .../trees/expressions/SubqueryExpr.java | 2 +- .../expressions/TimestampArithmetic.java | 2 +- .../trees/expressions/UnaryOperator.java | 2 +- .../nereids/trees/expressions/Variable.java | 2 +- .../trees/expressions/VariableDesc.java | 2 +- .../expressions/VirtualSlotReference.java | 2 +- .../nereids/trees/expressions/WhenClause.java | 2 +- 
.../trees/expressions/WindowExpression.java | 2 +- .../trees/expressions/WindowFrame.java | 2 +- .../expressions/functions/BoundFunction.java | 2 +- .../functions/agg/AggregateFunction.java | 2 +- .../expressions/functions/agg/Count.java | 4 +- .../functions/scalar/CryptoFunction.java | 2 +- .../expressions/functions/scalar/Lambda.java | 2 +- .../functions/table/TableValuedFunction.java | 2 +- .../expressions/literal/ArrayLiteral.java | 2 +- .../expressions/literal/DateLiteral.java | 2 +- .../expressions/literal/DateTimeLiteral.java | 2 +- .../expressions/literal/DecimalLiteral.java | 2 +- .../expressions/literal/DecimalV3Literal.java | 2 +- .../trees/expressions/literal/Literal.java | 2 +- .../trees/expressions/literal/MapLiteral.java | 2 +- .../trees/expressions/literal/MaxLiteral.java | 2 +- .../expressions/literal/StructLiteral.java | 2 +- .../plans/distribute/DistributePlanner.java | 1 + .../org/apache/doris/nereids/util/Utils.java | 12 ++++- 57 files changed, 151 insertions(+), 62 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java index 6a92e043b6eb20..5b0d5ba353387f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java +++ b/fe/fe-core/src/main/java/org/apache/doris/common/profile/SummaryProfile.java @@ -600,7 +600,9 @@ public void setQueryDistributedFinishTime() { } public void setQueryPlanFinishTime() { - this.queryPlanFinishTime = TimeUtils.getStartTimeMs(); + if (queryPlanFinishTime == -1) { + this.queryPlanFinishTime = TimeUtils.getStartTimeMs(); + } } public void setQueryScheduleFinishTime() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java index c7a020fd2abddf..2e9e84195508b1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/MappingSlot.java @@ -77,7 +77,7 @@ public boolean nullable() { } @Override - public String toSql() { + public String computeToSql() { return slot.toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java index 2be2130aba71d4..25d40dd5981194 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundAlias.java @@ -59,7 +59,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("(" + child() + ")"); alias.ifPresent(name -> stringBuilder.append(" AS " + name)); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java index a53917f08cd8d0..b4b21e40dcd4df 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundFunction.java @@ -115,7 +115,7 @@ public List getArguments() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { String params = children.stream() .map(Expression::toSql) .collect(Collectors.joining(", ")); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java index f85812569804aa..fdcb9547837686 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundSlot.java @@ -81,8 +81,18 @@ public String getInternalName() { } @Override - public 
String toSql() { - return nameParts.stream().map(Utils::quoteIfNeeded).reduce((left, right) -> left + "." + right).orElse(""); + public String computeToSql() { + switch (nameParts.size()) { + case 1: return Utils.quoteIfNeeded(nameParts.get(0)); + case 2: return Utils.quoteIfNeeded(nameParts.get(0)) + "." + Utils.quoteIfNeeded(nameParts.get(1)); + case 3: return Utils.quoteIfNeeded(nameParts.get(0)) + "." + Utils.quoteIfNeeded(nameParts.get(1)) + + "." + Utils.quoteIfNeeded(nameParts.get(2)); + default: { + return nameParts.stream().map(Utils::quoteIfNeeded) + .reduce((left, right) -> left + "." + right) + .orElse(""); + } + } } public static UnboundSlot quoted(String name) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java index 6d8ed904ec109d..cee6a0105f87c7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundStar.java @@ -98,7 +98,7 @@ public UnboundStar(List qualifier, List exceptedSlots, } @Override - public String toSql() { + public String computeToSql() { StringBuilder builder = new StringBuilder(); builder.append(Utils.qualifiedName(qualifier, "*")); if (!exceptedSlots.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java index 34646c1d657953..4ed71bbbc14673 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java @@ -23,7 +23,9 @@ import org.apache.doris.common.Pair; import org.apache.doris.nereids.DorisLexer; import org.apache.doris.nereids.DorisParser; +import org.apache.doris.nereids.DorisParser.NonReservedContext; import org.apache.doris.nereids.StatementContext; +import 
org.apache.doris.nereids.analyzer.UnboundSlot; import org.apache.doris.nereids.glue.LogicalPlanAdapter; import org.apache.doris.nereids.parser.plsql.PLSqlLogicalPlanBuilder; import org.apache.doris.nereids.trees.expressions.Expression; @@ -35,6 +37,8 @@ import org.apache.doris.qe.ConnectContext; import org.apache.doris.qe.SessionVariable; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.antlr.v4.runtime.CharStreams; @@ -45,14 +49,17 @@ import org.antlr.v4.runtime.TokenSource; import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.misc.ParseCancellationException; +import org.antlr.v4.runtime.tree.TerminalNode; import org.apache.commons.collections.CollectionUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import java.lang.reflect.Method; import java.util.BitSet; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.function.Function; import javax.annotation.Nullable; @@ -66,6 +73,9 @@ public class NereidsParser { private static final BitSet EXPLAIN_TOKENS = new BitSet(); + private static final Set NON_RESERVED_KEYWORDS; + private static final Map LITERAL_TOKENS; + static { EXPLAIN_TOKENS.set(DorisLexer.EXPLAIN); EXPLAIN_TOKENS.set(DorisLexer.PARSED); @@ -77,6 +87,25 @@ public class NereidsParser { EXPLAIN_TOKENS.set(DorisLexer.PLAN); EXPLAIN_TOKENS.set(DorisLexer.PROCESS); + ImmutableSet.Builder nonReserveds = ImmutableSet.builder(); + for (Method declaredMethod : NonReservedContext.class.getDeclaredMethods()) { + if (TerminalNode.class.equals(declaredMethod.getReturnType()) + && declaredMethod.getName().toUpperCase().equals(declaredMethod.getName()) + && declaredMethod.getParameterTypes().length == 0) { + String nonReserved = declaredMethod.getName(); + nonReserveds.add(nonReserved); + } + } + 
NON_RESERVED_KEYWORDS = nonReserveds.build(); + + ImmutableMap.Builder literalToTokenType = ImmutableMap.builder(); + for (int tokenType = 0; tokenType <= DorisLexer.VOCABULARY.getMaxTokenType(); tokenType++) { + String literalName = DorisLexer.VOCABULARY.getLiteralName(tokenType); + if (literalName != null) { + literalToTokenType.put(literalName.substring(1, literalName.length() - 1), tokenType); + } + } + LITERAL_TOKENS = literalToTokenType.build(); } /** @@ -256,9 +285,33 @@ public List> parseMultiple(String sql, } public Expression parseExpression(String expression) { + if (isSimpleIdentifier(expression)) { + return new UnboundSlot(expression); + } return parse(expression, DorisParser::expression); } + private static boolean isSimpleIdentifier(String expression) { + if (expression == null || expression.isEmpty()) { + return false; + } + + boolean hasLetter = false; + for (int i = 0; i < expression.length(); i++) { + char c = expression.charAt(i); + if ((('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_' || c == '$')) { + hasLetter = true; + } else if (!('0' <= c && c <= '9')) { + return false; + } + } + if (!hasLetter) { + return false; + } + String upperCase = expression.toUpperCase(); + return (NON_RESERVED_KEYWORDS.contains(upperCase) || !LITERAL_TOKENS.containsKey(upperCase)); + } + public DataType parseDataType(String dataType) { return parse(dataType, DorisParser::dataType); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java index 357883d1f7136a..3c31ce22e4611a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/AbstractSelectMaterializedIndexRule.java @@ -219,8 +219,9 @@ public static String parseMvColumnToMvName(String 
mvName, Optional aggTy } protected static boolean containsAllColumn(Expression expression, Set mvColumnNames) { - if (mvColumnNames.contains(expression.toSql()) || mvColumnNames - .contains(org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(expression.toSql()))) { + String sql = expression.toSql(); + if (mvColumnNames.contains(sql) || mvColumnNames + .contains(org.apache.doris.analysis.CreateMaterializedViewStmt.mvColumnBreaker(sql))) { return true; } if (expression.children().isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java index 468b9cf659c5b2..fa03165b37c337 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/mv/SelectMaterializedIndexWithAggregate.java @@ -625,9 +625,13 @@ private SelectResult select(LogicalOlapScan scan, Set requiredScanOutput, aggFuncsDiff(aggregateFunctions, aggRewriteResult), groupingExprs).isOn()) .collect(Collectors.toSet()); + Set candidatesWithRewritingIndexes = candidatesWithRewriting.stream() + .map(result -> result.index) + .collect(Collectors.toSet()); + Set candidatesWithoutRewriting = indexesGroupByIsBaseOrNot .getOrDefault(false, ImmutableList.of()).stream() - .filter(index -> !candidatesWithRewriting.contains(index)) + .filter(index -> !candidatesWithRewritingIndexes.contains(index)) .filter(index -> preAggEnabledByHint(scan) || checkPreAggStatus(scan, index.getId(), predicates, aggregateFunctions, groupingExprs).isOn()) .collect(Collectors.toSet()); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java index 
2e20dd05180a71..86d7eb72382300 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/AggregateExpression.java @@ -100,7 +100,7 @@ public AggregateExpression withChildren(List children) { } @Override - public String toSql() { + public String computeToSql() { if (aggregateParam.aggMode.productAggregateBuffer) { return "partial_" + function.toSql(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java index 9eea3afd879e67..53a82011ac4c3c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Alias.java @@ -124,7 +124,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return child().toSql() + " AS `" + name.get() + "`"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java index c54ad358561d8e..edc074af2b513a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ArrayItemReference.java @@ -92,7 +92,7 @@ public DataType getDataType() { } @Override - public String toSql() { + public String computeToSql() { return child(0).toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java index 750f3a77881430..f699e7531f6207 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BinaryOperator.java @@ -49,7 +49,7 @@ public List expectedInputTypes() { } @Override - public String toSql() { + public String computeToSql() { return "(" + left().toSql() + " " + symbol + " " + right().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java index 8b4bffad3fc817..0789d9a65279db 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/BoundStar.java @@ -35,7 +35,7 @@ public BoundStar(List children) { ); } - public String toSql() { + public String computeToSql() { return children.stream().map(Expression::toSql).collect(Collectors.joining(", ")); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java index bd48b648a73dfb..0c3687f57153f2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CaseWhen.java @@ -111,7 +111,7 @@ public String toString() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder output = new StringBuilder("CASE"); for (Expression child : children()) { if (child instanceof WhenClause) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java index 9122f0f4adbb0a..20f8079bd9f141 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Cast.java @@ -95,7 +95,7 @@ public Cast 
withChildren(List children) { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return "cast(" + child().toSql() + " as " + targetType.toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java index d58d1ba8193de5..9b1535eb9cc3c9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/CompoundPredicate.java @@ -101,7 +101,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); children().forEach(c -> sb.append(c.toSql()).append(",")); sb.deleteCharAt(sb.length() - 1); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java index 3d3bd17c70e12c..8d097d0faa6f45 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Exists.java @@ -65,8 +65,8 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { - return "EXISTS (SUBQUERY) " + super.toSql(); + public String computeToSql() { + return "EXISTS (SUBQUERY) " + super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java index e20290e8b59c41..6634d5e0160ead 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Expression.java @@ -20,6 +20,7 @@ import 
org.apache.doris.nereids.analyzer.Unbound; import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.exceptions.UnboundException; import org.apache.doris.nereids.trees.AbstractTreeNode; import org.apache.doris.nereids.trees.expressions.ArrayItemReference.ArrayItemSlot; import org.apache.doris.nereids.trees.expressions.functions.ExpressionTrait; @@ -68,6 +69,7 @@ public abstract class Expression extends AbstractTreeNode implements private final Supplier> inputSlots = Suppliers.memoize( () -> collect(e -> e instanceof Slot && !(e instanceof ArrayItemSlot))); private final int fastChildrenHashCode; + private final Supplier toSqlCache = Suppliers.memoize(this::computeToSql); protected Expression(Expression... children) { super(children); @@ -210,6 +212,10 @@ public int fastChildrenHashCode() { return fastChildrenHashCode; } + protected String computeToSql() { + throw new UnboundException("sql"); + } + protected TypeCheckResult checkInputDataTypesInternal() { return TypeCheckResult.SUCCESS; } @@ -301,6 +307,10 @@ public boolean isInferred() { return inferred; } + public final String toSql() { + return toSqlCache.get(); + } + @Override public Expression withChildren(List children) { throw new RuntimeException(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java index 53a753c4535dd1..b8c0cf54471901 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InPredicate.java @@ -122,7 +122,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return compareExpr.toSql() + " IN " + options.stream() .map(Expression::toSql).sorted() .collect(Collectors.joining(", ", "(", ")")); diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java index 8b7d0518181fda..71dc1f5eb4f08f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/InSubquery.java @@ -77,8 +77,8 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { - return this.compareExpr.toSql() + " IN (" + super.toSql() + ")"; + public String computeToSql() { + return this.compareExpr.toSql() + " IN (" + super.computeToSql() + ")"; } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java index 7bb8538fc75031..22216a84bafe87 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/IsNull.java @@ -55,7 +55,7 @@ public IsNull withChildren(List children) { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return child().toSql() + " IS NULL"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java index 214525d2594580..16dade740b9422 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ListQuery.java @@ -48,8 +48,8 @@ public DataType getDataType() { } @Override - public String toSql() { - return " (LISTQUERY) " + super.toSql(); + public String computeToSql() { + return " (LISTQUERY) " + super.computeToSql(); } @Override diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java index d9dcde287d3884..405e3cb8fe4612 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Match.java @@ -76,7 +76,7 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return "(" + left().toSql() + " " + symbol + " " + right().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java index 5061cab5ac9631..b001da9118fea3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Not.java @@ -102,7 +102,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return "( not " + child().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java index d09fe2c0a00ed3..7e33d4315d81ad 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/OrderExpression.java @@ -81,7 +81,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return orderKey.toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java index c79c2d9db6d0e9..3ce8cdb017f9b8 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Placeholder.java @@ -66,7 +66,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { return "?"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java index db0c78c1f78f57..d604e919e31d17 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Properties.java @@ -56,7 +56,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return getMap() .entrySet() .stream() diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java index 178debe7db83a5..25a7052a4acabc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/ScalarSubquery.java @@ -84,8 +84,8 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { - return " (SCALARSUBQUERY) " + super.toSql(); + public String computeToSql() { + return " (SCALARSUBQUERY) " + super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java index e90bc3a5ecfaf4..890fbdfdb96f72 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SlotReference.java 
@@ -183,7 +183,7 @@ public Optional getTable() { } @Override - public String toSql() { + public String computeToSql() { if (subPath.isEmpty()) { return name.get(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java index 8900ac928590c3..5a62be54f93d81 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/StringRegexPredicate.java @@ -55,7 +55,7 @@ public List getSignatures() { } @Override - public String toSql() { + public String computeToSql() { return '(' + left().toSql() + ' ' + getName() + ' ' + right().toSql() + ')'; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java index 35d0e566476880..c08fda1dc6b713 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/SubqueryExpr.java @@ -80,7 +80,7 @@ public boolean nullable() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return "(" + queryPlan + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java index d3e326fa48a574..40a727eb1757ba 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/TimestampArithmetic.java @@ -129,7 +129,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder strBuilder = new 
StringBuilder(); if (funcName != null) { // Function-call like version. diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java index ace2c648daec79..61efa91f2621fc 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/UnaryOperator.java @@ -46,7 +46,7 @@ public List expectedInputTypes() { } @Override - public String toSql() { + public String computeToSql() { return "(" + symbol + " " + child().toSql() + ")"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java index fd16b84b183c7b..5944ec08744980 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/Variable.java @@ -85,7 +85,7 @@ public String toString() throws UnboundException { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java index 38f23ee40fafe9..3a16b38f9e19e0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VariableDesc.java @@ -50,7 +50,7 @@ public SetType getSetType() { } @Override - public String toSql() { + public String computeToSql() { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java index 43f4853758105c..1b46a8552bafb9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/VirtualSlotReference.java @@ -82,7 +82,7 @@ public R accept(ExpressionVisitor visitor, C context) { } @Override - public String toSql() { + public String computeToSql() { return getName(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java index 4ce77f22df1692..adb862bb2f1041 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WhenClause.java @@ -56,7 +56,7 @@ public Expression getResult() { } @Override - public String toSql() { + public String computeToSql() { return " WHEN " + left().toSql() + " THEN " + right().toSql(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java index 5bea07fff00326..7f26298c700626 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowExpression.java @@ -179,7 +179,7 @@ public int hashCode() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append(function.toSql()).append(" OVER("); if (!partitionKeys.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java index 5cbb93ce3748ea..58ed4f15f9baa0 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/WindowFrame.java @@ -95,7 +95,7 @@ public int hashCode() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append(frameUnits + " "); if (rightBoundary != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java index 5ccc64a34bb43b..13d4b515ad75f3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/BoundFunction.java @@ -85,7 +85,7 @@ public int hashCode() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder sql = new StringBuilder(getName()).append("("); int arity = arity(); for (int i = 0; i < arity; i++) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java index 90df2f531da3fb..777c9c4cc7add5 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/AggregateFunction.java @@ -107,7 +107,7 @@ public boolean hasVarArguments() { } @Override - public String toSql() throws UnboundException { + public String computeToSql() throws UnboundException { StringBuilder sql = new StringBuilder(getName()).append("("); if (distinct) { sql.append("DISTINCT "); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java index e86e90974da1bd..21e6ee1cba6b21 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/agg/Count.java @@ -119,11 +119,11 @@ public Count withDistinctAndChildren(boolean distinct, List children } @Override - public String toSql() { + public String computeToSql() { if (isStar) { return "count(*)"; } - return super.toSql(); + return super.computeToSql(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java index 151f7ffc7732be..1e4a866ecdcc53 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/CryptoFunction.java @@ -42,7 +42,7 @@ public CryptoFunction(String name, List arguments) { } @Override - public String toSql() { + public String computeToSql() { List args = Lists.newArrayList(); for (int i = 0; i < arity(); i++) { if (i == 1) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java index e8261f6391dda9..2ecab6090d8d3f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/scalar/Lambda.java @@ -126,7 +126,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { StringBuilder builder = new StringBuilder(); String argStr = argumentNames.get(0); if 
(argumentNames.size() > 1) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java index 837edf27ab1067..4a4257e67609d0 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/functions/table/TableValuedFunction.java @@ -130,7 +130,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { String args = getTVFProperties() .getMap() .entrySet() diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java index 486eeddabd71c8..be84a5b32e35cf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/ArrayLiteral.java @@ -94,7 +94,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { String items = this.items.stream() .map(Literal::toSql) .collect(Collectors.joining(", ")); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java index 6ea1d2af725679..ed99e3025e8603 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java @@ -423,7 +423,7 @@ public String getStringValue() { } @Override - public String toSql() { + public String computeToSql() { return "'" + getStringValue() + "'"; } 
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java index 0a5c02409c113a..169ed421934824 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java @@ -266,7 +266,7 @@ public double getDouble() { } @Override - public String toSql() { + public String computeToSql() { return "'" + getStringValue() + "'"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java index 4ffc92c634d709..1f0aa788cdc641 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalLiteral.java @@ -112,7 +112,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { return value.toPlainString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java index d8be4faf0c9395..045da28bdb38a4 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DecimalV3Literal.java @@ -152,7 +152,7 @@ public boolean equals(Object o) { } @Override - public String toSql() { + public String computeToSql() { return value.toPlainString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java index e8e37aaf697e24..69e61b03c82314 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/Literal.java @@ -135,7 +135,7 @@ public DataType getDataType() throws UnboundException { } @Override - public String toSql() { + public String computeToSql() { return toString(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java index c57bd3a04875e1..dbcf74c971e069 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MapLiteral.java @@ -114,7 +114,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append("map("); if (!keys.isEmpty()) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java index ce1278a9ad4b26..763fdfb1f4f1f8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/MaxLiteral.java @@ -38,7 +38,7 @@ public LiteralExpr toLegacyLiteral() { } @Override - public String toSql() { + public String computeToSql() { return "MAX_VALUE"; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java index 3a46f1f5b83e7e..f44aa663c9eb03 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/StructLiteral.java @@ -124,7 +124,7 @@ public String toString() { } @Override - public String toSql() { + public String computeToSql() { StringBuilder sb = new StringBuilder(); sb.append("STRUCT("); for (int i = 0; i < fields.size(); i++) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java index 12ab8b42eaab61..75a2326236fc9b 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/distribute/DistributePlanner.java @@ -73,6 +73,7 @@ public DistributePlanner(StatementContext statementContext, List f /** plan */ public FragmentIdMapping plan() { + updateProfileIfPresent(SummaryProfile::setQueryPlanFinishTime); try { FragmentIdMapping fragmentJobs = UnassignedJobBuilder.buildJobs(statementContext, idToFragments); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java index 42b99f6effdb84..c111839fc5093e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/Utils.java @@ -58,8 +58,16 @@ public class Utils { */ public static String quoteIfNeeded(String part) { // We quote strings except the ones which consist of digits only. - return part.matches("\\w*[\\w&&[^\\d]]+\\w*") - ? 
part : part.replace("`", "``"); + StringBuilder quote = new StringBuilder(part.length()); + for (int i = 0; i < part.length(); i++) { + char c = part.charAt(i); + if (c == '`') { + quote.append("``"); + } else { + quote.append(c); + } + } + return quote.toString(); } /** From c2e048f71c3edf75bb352dbaeeedd087c8df1264 Mon Sep 17 00:00:00 2001 From: yujun Date: Mon, 23 Dec 2024 11:28:30 +0800 Subject: [PATCH 47/82] [fix](nereids) fix comparison with date like (#45735) ### What problem does this PR solve? Issue Number: close #xxx Related PR: #45382 Problem Summary: PR #45382 fixed the incorrect truncation when comparing date/datev1 with a datetime literal, but the fix was incomplete. ``` if (right instanceof DateTimeLiteral) { DateTimeLiteral dateTimeLiteral = (DateTimeLiteral) right; right = migrateToDateV2(dateTimeLiteral); if (dateTimeLiteral.getHour() != 0 || dateTimeLiteral.getMinute() != 0 || dateTimeLiteral.getSecond() != 0) { ... } } ``` The above code checks whether `right` is a datetime literal, but note that a datetimev2 literal is a subclass of datetime literal, so datetimev2 literals also take this code path. A datetimev2 literal should additionally check that its microseconds are not equal to 0. For example, `date_a = '2020-01-01 00:00:00.01'` should be optimized to `FALSE`, not to `date_a = '2020-01-01'`.
--- .../rules/SimplifyComparisonPredicate.java | 2 +- .../SimplifyComparisonPredicateTest.java | 24 +++++++++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java index fbe0d44417363a..7dc9ddcb3971ef 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicate.java @@ -182,7 +182,7 @@ private static Expression processDateLikeTypeCoercion(ComparisonPredicate cp, Ex DateTimeLiteral dateTimeLiteral = (DateTimeLiteral) right; right = migrateToDateV2(dateTimeLiteral); if (dateTimeLiteral.getHour() != 0 || dateTimeLiteral.getMinute() != 0 - || dateTimeLiteral.getSecond() != 0) { + || dateTimeLiteral.getSecond() != 0 || dateTimeLiteral.getMicroSecond() != 0) { if (cp instanceof EqualTo) { return ExpressionUtils.falseOrNull(cast.child()); } else if (cp instanceof NullSafeEqual) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java index 028f1c4864f099..bab3b4385137e8 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java @@ -166,6 +166,18 @@ void testDateTimeV2CmpDateTimeV2() { new LessThan(date, new DateV2Literal("2020-01-02"))); assertRewrite(new LessThanEqual(new Cast(date, DateTimeType.INSTANCE), new DateTimeLiteral("2020-01-01 00:00:01")), new LessThanEqual(date, new DateV2Literal("2020-01-01"))); + assertRewrite(new 
EqualTo(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:00")), + new EqualTo(date, new DateV2Literal("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + ExpressionUtils.falseOrNull(date)); + assertRewrite(new EqualTo(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + ExpressionUtils.falseOrNull(date)); + assertRewrite(new NullSafeEqual(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThanEqual(new Cast(date, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + new GreaterThanEqual(date, new DateV2Literal("2020-01-02"))); + assertRewrite(new GreaterThanEqual(new Cast(date, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + new GreaterThanEqual(date, new DateV2Literal("2020-01-02"))); // cast (date as datev1) = datev1-literal // assertRewrite(new EqualTo(new Cast(date, DateType.INSTANCE), new DateLiteral("2020-01-01")), // new EqualTo(date, new DateV2Literal("2020-01-01"))); @@ -191,6 +203,18 @@ void testDateTimeV2CmpDateTimeV2() { new EqualTo(datev1, new DateLiteral("2020-01-01"))); assertRewrite(new GreaterThan(new Cast(datev1, DateV2Type.INSTANCE), new DateV2Literal("2020-01-01")), new GreaterThan(datev1, new DateLiteral("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:00")), + new EqualTo(datev1, new DateLiteral("2020-01-01"))); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + ExpressionUtils.falseOrNull(datev1)); + assertRewrite(new EqualTo(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + ExpressionUtils.falseOrNull(datev1)); + assertRewrite(new 
NullSafeEqual(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThanEqual(new Cast(datev1, DateTimeV2Type.SYSTEM_DEFAULT), new DateTimeV2Literal("2020-01-01 00:00:01")), + new GreaterThanEqual(datev1, new DateLiteral("2020-01-02"))); + assertRewrite(new GreaterThanEqual(new Cast(datev1, DateTimeV2Type.of(2)), new DateTimeV2Literal("2020-01-01 00:00:00.01")), + new GreaterThanEqual(datev1, new DateLiteral("2020-01-02"))); // cast (datetimev1 as datetime) cmp datetime assertRewrite(new EqualTo(new Cast(datetimev1, DateTimeV2Type.of(0)), new DateTimeV2Literal("2020-01-01 00:00:00")), From 208fde0648110f3a6d2837b43bbd8c19c5c51d33 Mon Sep 17 00:00:00 2001 From: Mryange Date: Mon, 23 Dec 2024 11:33:56 +0800 Subject: [PATCH 48/82] [refine](Column)Disallow implicit conversion of ColumnPtr to IColumn* (#45588) ### What problem does this PR solve? Previously, we allowed ColumnPtr to be directly converted to Column*: ```C++ ColumnPtr column; const IColumn* ptr = column; ``` This can easily cause confusion. For example, in the following code: ```C++ ColumnPtr column; const auto& const_column = check_and_get_column(column); ``` The matched function is: ```C++ template <> const doris::vectorized::ColumnConst* check_and_get_column( const IColumn* column) ``` However, the actual type of const_column is: ```C++ const doris::vectorized::ColumnConst* const& ``` ### Release note None ### Check List (For Author) - Test - [ ] Regression test - [ ] Unit Test - [ ] Manual test (add detailed scripts or steps below) - [x] No need to test or manual test. Explain why: - [x] This is a refactor/code format and no logic has been changed. - [ ] Previous test can cover this change. - [ ] No code files have been changed. - [ ] Other reason - Behavior changed: - [x] No. - [ ] Yes. - Does this need documentation? - [x] No. - [ ] Yes. 
### Check List (For Reviewer who merge this PR) - [ ] Confirm the release note - [ ] Confirm test cases - [ ] Confirm document - [ ] Add branch pick label --- be/src/exec/table_connector.cpp | 7 ++-- be/src/olap/push_handler.cpp | 2 +- .../olap/rowset/segment_v2/column_reader.cpp | 13 +++--- .../segment_v2/hierarchical_data_reader.h | 4 +- .../rowset/segment_v2/segment_iterator.cpp | 5 +-- be/src/olap/schema_change.cpp | 14 ++++--- be/src/pipeline/exec/hashjoin_build_sink.cpp | 2 +- .../pipeline/exec/hashjoin_probe_operator.cpp | 4 +- be/src/pipeline/exec/join_probe_operator.cpp | 2 +- be/src/pipeline/exec/olap_scan_operator.cpp | 5 +-- be/src/pipeline/exec/scan_operator.cpp | 8 ++-- .../aggregate_function_window.h | 2 +- be/src/vec/columns/column_object.cpp | 6 +-- be/src/vec/common/cow.h | 6 +-- .../vec/exec/format/column_type_convert.cpp | 2 +- be/src/vec/exec/format/csv/csv_reader.cpp | 4 +- .../vec/exec/format/json/new_json_reader.cpp | 23 +++++----- .../format/parquet/parquet_column_convert.cpp | 2 +- be/src/vec/exec/format/wal/wal_reader.cpp | 2 +- be/src/vec/exec/jni_connector.cpp | 2 +- be/src/vec/exec/scan/vfile_scanner.cpp | 2 +- be/src/vec/exec/scan/vmeta_scanner.cpp | 2 +- be/src/vec/exprs/vcompound_pred.h | 6 ++- .../array/function_array_cum_sum.cpp | 2 +- .../functions/array/function_array_distinct.h | 4 +- .../array/function_array_enumerate.cpp | 5 ++- .../array/function_array_enumerate_uniq.cpp | 7 ++-- .../vec/functions/array/function_array_join.h | 7 ++-- .../vec/functions/array/function_array_map.h | 2 +- .../functions/array/function_array_pop.cpp | 2 +- .../functions/array/function_array_range.cpp | 2 +- .../functions/array/function_array_remove.h | 8 ++-- .../functions/array/function_array_reverse.h | 2 +- .../vec/functions/array/function_array_set.h | 4 +- .../functions/array/function_array_slice.h | 2 +- .../functions/array/function_array_sortby.cpp | 6 +-- .../functions/array/function_arrays_overlap.h | 8 ++-- 
.../functions/comparison_equal_for_null.cpp | 10 +++-- be/src/vec/functions/function_agg_state.h | 2 +- .../functions/function_binary_arithmetic.h | 28 +++++++------ be/src/vec/functions/function_bitmap.cpp | 2 +- .../functions/function_bitmap_variadic.cpp | 2 +- be/src/vec/functions/function_case.h | 2 +- be/src/vec/functions/function_cast.h | 2 +- be/src/vec/functions/function_collection_in.h | 5 ++- .../function_date_or_datetime_computation.h | 4 +- be/src/vec/functions/function_ip.h | 6 ++- be/src/vec/functions/function_jsonb.cpp | 26 +++++++----- be/src/vec/functions/function_nullables.cpp | 5 ++- .../vec/functions/function_quantile_state.cpp | 4 +- be/src/vec/functions/function_string.h | 4 +- be/src/vec/functions/function_tokenize.cpp | 2 +- be/src/vec/functions/functions_geo.cpp | 16 +++---- be/src/vec/functions/functions_logical.cpp | 13 +++--- be/src/vec/functions/in.h | 2 +- be/src/vec/functions/least_greast.cpp | 2 +- be/src/vec/functions/round.h | 1 + be/src/vec/sink/vtablet_block_convertor.cpp | 5 ++- .../writer/iceberg/partition_transformers.h | 42 +++++++++---------- .../writer/iceberg/viceberg_table_writer.cpp | 2 +- be/src/vec/utils/util.hpp | 2 +- be/test/vec/columns/common_column_test.h | 6 +-- be/test/vec/data_types/from_string_test.cpp | 12 +++--- .../serde/data_type_serde_text_test.cpp | 12 +++--- be/test/vec/function/function_test_util.h | 2 +- be/test/vec/olap/char_type_padding_test.cpp | 6 +-- 66 files changed, 218 insertions(+), 195 deletions(-) diff --git a/be/src/exec/table_connector.cpp b/be/src/exec/table_connector.cpp index fa5181f5fecb2d..549fa6aae90fd8 100644 --- a/be/src/exec/table_connector.cpp +++ b/be/src/exec/table_connector.cpp @@ -118,16 +118,17 @@ Status TableConnector::convert_column_data(const vectorized::ColumnPtr& column_p fmt::format_to(_insert_stmt_buffer, "\"{}\"", str); } }; - const vectorized::IColumn* column = column_ptr; + const vectorized::IColumn* column = column_ptr.get(); if (type_ptr->is_nullable()) { - auto 
nullable_column = assert_cast(column_ptr.get()); + const auto* nullable_column = + assert_cast(column_ptr.get()); if (nullable_column->is_null_at(row)) { fmt::format_to(_insert_stmt_buffer, "{}", "NULL"); return Status::OK(); } column = nullable_column->get_nested_column_ptr().get(); } else { - column = column_ptr; + column = column_ptr.get(); } auto [item, size] = column->get_data_at(row); switch (type.type) { diff --git a/be/src/olap/push_handler.cpp b/be/src/olap/push_handler.cpp index 56d167459f5be7..eecb322384b698 100644 --- a/be/src/olap/push_handler.cpp +++ b/be/src/olap/push_handler.cpp @@ -518,7 +518,7 @@ Status PushBrokerReader::_convert_to_output_block(vectorized::Block* block) { column_ptr = _src_block.get_by_position(result_column_id).column; // column_ptr maybe a ColumnConst, convert it to a normal column column_ptr = column_ptr->convert_to_full_column_if_const(); - DCHECK(column_ptr != nullptr); + DCHECK(column_ptr); // because of src_slot_desc is always be nullable, so the column_ptr after do dest_expr // is likely to be nullable diff --git a/be/src/olap/rowset/segment_v2/column_reader.cpp b/be/src/olap/rowset/segment_v2/column_reader.cpp index 78c415530cd029..1abb60e58507ec 100644 --- a/be/src/olap/rowset/segment_v2/column_reader.cpp +++ b/be/src/olap/rowset/segment_v2/column_reader.cpp @@ -1267,8 +1267,8 @@ Status FileColumnIterator::next_batch(size_t* n, vectorized::MutableColumnPtr& d DCHECK_EQ(this_run, num_rows); } else { *has_null = true; - auto* null_col = - vectorized::check_and_get_column(dst); + const auto* null_col = + vectorized::check_and_get_column(dst.get()); if (null_col != nullptr) { const_cast(null_col)->insert_null_elements( this_run); @@ -1328,8 +1328,9 @@ Status FileColumnIterator::read_by_rowids(const rowid_t* rowids, const size_t co auto origin_index = _page.data_decoder->current_index(); if (this_read_count > 0) { if (is_null) { - auto* null_col = - vectorized::check_and_get_column(dst); + const auto* null_col = + 
vectorized::check_and_get_column( + dst.get()); if (UNLIKELY(null_col == nullptr)) { return Status::InternalError("unexpected column type in column reader"); } @@ -1710,9 +1711,9 @@ Status DefaultNestedColumnIterator::next_batch(size_t* n, vectorized::MutableCol static void fill_nested_with_defaults(vectorized::MutableColumnPtr& dst, vectorized::MutableColumnPtr& sibling_column, size_t nrows) { const auto* sibling_array = vectorized::check_and_get_column( - remove_nullable(sibling_column->get_ptr())); + remove_nullable(sibling_column->get_ptr()).get()); const auto* dst_array = vectorized::check_and_get_column( - remove_nullable(dst->get_ptr())); + remove_nullable(dst->get_ptr()).get()); if (!dst_array || !sibling_array) { throw doris::Exception(ErrorCode::INTERNAL_ERROR, "Expected array column, but met %s and %s", dst->get_name(), diff --git a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h index f85038713cadb7..bd5de7484740a8 100644 --- a/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h +++ b/be/src/olap/rowset/segment_v2/hierarchical_data_reader.h @@ -165,8 +165,8 @@ class HierarchicalDataReader : public ColumnIterator { // will type the type of ColumnObject::NESTED_TYPE, whih is Nullable>. 
for (auto& entry : nested_subcolumns) { MutableColumnPtr nested_object = ColumnObject::create(true, false); - const auto* base_array = - check_and_get_column(remove_nullable(entry.second[0].column)); + const auto* base_array = check_and_get_column( + remove_nullable(entry.second[0].column).get()); MutableColumnPtr offset = base_array->get_offsets_ptr()->assume_mutable(); auto* nested_object_ptr = assert_cast(nested_object.get()); // flatten nested arrays diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp index 5f50ffeea2d8f0..366c6d3ce21a76 100644 --- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp +++ b/be/src/olap/rowset/segment_v2/segment_iterator.cpp @@ -1955,8 +1955,7 @@ Status SegmentIterator::next_batch(vectorized::Block* block) { Status SegmentIterator::_convert_to_expected_type(const std::vector& col_ids) { for (ColumnId i : col_ids) { - if (_current_return_columns[i] == nullptr || _converted_column_ids[i] || - _is_pred_column[i]) { + if (!_current_return_columns[i] || _converted_column_ids[i] || _is_pred_column[i]) { continue; } if (!_segment->same_with_storage_type( @@ -1999,7 +1998,7 @@ Status SegmentIterator::copy_column_data_by_selector(vectorized::IColumn* input_ return Status::RuntimeError("copy_column_data_by_selector nullable mismatch"); } - return input_col_ptr->filter_by_selector(sel_rowid_idx, select_size, output_col); + return input_col_ptr->filter_by_selector(sel_rowid_idx, select_size, output_col.get()); } void SegmentIterator::_clear_iterators() { diff --git a/be/src/olap/schema_change.cpp b/be/src/olap/schema_change.cpp index 7f947612eed4ac..658ff05b67f0d6 100644 --- a/be/src/olap/schema_change.cpp +++ b/be/src/olap/schema_change.cpp @@ -337,7 +337,7 @@ Status BlockChanger::change_block(vectorized::Block* ref_block, int result_tmp_column_idx = -1; RETURN_IF_ERROR(ctx->execute(ref_block, &result_tmp_column_idx)); auto& result_tmp_column_def = 
ref_block->get_by_position(result_tmp_column_idx); - if (result_tmp_column_def.column == nullptr) { + if (!result_tmp_column_def.column) { return Status::Error( "result column={} is nullptr, input expr={}", result_tmp_column_def.name, apache::thrift::ThriftDebugString(*expr)); @@ -430,7 +430,7 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, if (input_column->is_nullable() != output_column->is_nullable()) { if (input_column->is_nullable()) { const auto* ref_null_map = - vectorized::check_and_get_column(input_column) + vectorized::check_and_get_column(input_column.get()) ->get_null_map_column() .get_data() .data(); @@ -446,10 +446,12 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, } } else { const auto& null_map_column = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column( + output_column.get()) ->get_null_map_column(); const auto& nested_column = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column( + output_column.get()) ->get_nested_column(); const auto* new_null_map = null_map_column.get_data().data(); @@ -481,12 +483,12 @@ Status BlockChanger::_check_cast_valid(vectorized::ColumnPtr input_column, if (input_column->is_nullable() && output_column->is_nullable()) { const auto* ref_null_map = - vectorized::check_and_get_column(input_column) + vectorized::check_and_get_column(input_column.get()) ->get_null_map_column() .get_data() .data(); const auto* new_null_map = - vectorized::check_and_get_column(output_column) + vectorized::check_and_get_column(output_column.get()) ->get_null_map_column() .get_data() .data(); diff --git a/be/src/pipeline/exec/hashjoin_build_sink.cpp b/be/src/pipeline/exec/hashjoin_build_sink.cpp index 47560875b51252..b71feff3ed4460 100644 --- a/be/src/pipeline/exec/hashjoin_build_sink.cpp +++ b/be/src/pipeline/exec/hashjoin_build_sink.cpp @@ -254,7 +254,7 @@ Status HashJoinBuildSinkLocalState::_extract_join_column( // 
update nulllmap and split nested out of ColumnNullable when serialize_null_into_key is false and column is nullable const auto& col_nested = nullable->get_nested_column(); const auto& col_nullmap = nullable->get_null_map_data(); - DCHECK(null_map != nullptr); + DCHECK(null_map); vectorized::VectorizedUtils::update_null_map(null_map->get_data(), col_nullmap); raw_ptrs[i] = &col_nested; } else { diff --git a/be/src/pipeline/exec/hashjoin_probe_operator.cpp b/be/src/pipeline/exec/hashjoin_probe_operator.cpp index 0db525f1bf5222..37ccd6206f3e0f 100644 --- a/be/src/pipeline/exec/hashjoin_probe_operator.cpp +++ b/be/src/pipeline/exec/hashjoin_probe_operator.cpp @@ -371,7 +371,7 @@ Status HashJoinProbeLocalState::_extract_join_column(vectorized::Block& block, _need_null_map_for_probe = _need_probe_null_map(block, res_col_ids); } if (_need_null_map_for_probe) { - if (_null_map_column == nullptr) { + if (!_null_map_column) { _null_map_column = vectorized::ColumnUInt8::create(); } _null_map_column->get_data().assign(block.rows(), (uint8_t)0); @@ -389,7 +389,7 @@ Status HashJoinProbeLocalState::_extract_join_column(vectorized::Block& block, // update nulllmap and split nested out of ColumnNullable when serialize_null_into_key is false and column is nullable const auto& col_nested = nullable->get_nested_column(); const auto& col_nullmap = nullable->get_null_map_data(); - DCHECK(_null_map_column != nullptr); + DCHECK(_null_map_column); vectorized::VectorizedUtils::update_null_map(_null_map_column->get_data(), col_nullmap); _probe_columns[i] = &col_nested; } else { diff --git a/be/src/pipeline/exec/join_probe_operator.cpp b/be/src/pipeline/exec/join_probe_operator.cpp index 11b5b29c8b556b..9a50d76a48ce8c 100644 --- a/be/src/pipeline/exec/join_probe_operator.cpp +++ b/be/src/pipeline/exec/join_probe_operator.cpp @@ -150,7 +150,7 @@ Status JoinProbeLocalState::_build_output_block( /// TODO: maybe need a method to check if a column need to be converted to full /// column. 
if (is_column_const(*origin_column) || - check_column(origin_column)) { + check_column(origin_column.get())) { auto column_ptr = origin_column->convert_to_full_column_if_const(); insert_column_datas(mutable_columns[i], column_ptr, rows); } else { diff --git a/be/src/pipeline/exec/olap_scan_operator.cpp b/be/src/pipeline/exec/olap_scan_operator.cpp index fa91caffa8ebc4..b1ab62743323c6 100644 --- a/be/src/pipeline/exec/olap_scan_operator.cpp +++ b/be/src/pipeline/exec/olap_scan_operator.cpp @@ -246,9 +246,8 @@ Status OlapScanLocalState::_should_push_down_function_filter(vectorized::Vectori DCHECK(children[1 - i]->type().is_string_type()); std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(children[1 - i]->get_const_col(expr_ctx, &const_col_wrapper)); - if (const vectorized::ColumnConst* const_column = - check_and_get_column( - const_col_wrapper->column_ptr)) { + if (const auto* const_column = check_and_get_column( + const_col_wrapper->column_ptr.get())) { *constant_str = const_column->get_data_at(0); } else { pdt = PushDownType::UNACCEPTABLE; diff --git a/be/src/pipeline/exec/scan_operator.cpp b/be/src/pipeline/exec/scan_operator.cpp index ae4396b22c7eec..a73e1a6db7ccb7 100644 --- a/be/src/pipeline/exec/scan_operator.cpp +++ b/be/src/pipeline/exec/scan_operator.cpp @@ -520,8 +520,8 @@ Status ScanLocalState::_eval_const_conjuncts(vectorized::VExpr* vexpr, if (vexpr->is_constant()) { std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(vexpr->get_const_col(expr_ctx, &const_col_wrapper)); - if (const auto* const_column = - check_and_get_column(const_col_wrapper->column_ptr)) { + if (const auto* const_column = check_and_get_column( + const_col_wrapper->column_ptr.get())) { constant_val = const_cast(const_column->get_data_at(0).data); if (constant_val == nullptr || !*reinterpret_cast(constant_val)) { *pdt = PushDownType::ACCEPTABLE; @@ -530,7 +530,7 @@ Status ScanLocalState::_eval_const_conjuncts(vectorized::VExpr* vexpr, } } else if (const auto* bool_column = 
check_and_get_column>( - const_col_wrapper->column_ptr)) { + const_col_wrapper->column_ptr.get())) { // TODO: If `vexpr->is_constant()` is true, a const column is expected here. // But now we still don't cover all predicates for const expression. // For example, for query `SELECT col FROM tbl WHERE 'PROMOTION' LIKE 'AAA%'`, @@ -690,7 +690,7 @@ Status ScanLocalState::_should_push_down_binary_predicate( std::shared_ptr const_col_wrapper; RETURN_IF_ERROR(children[1 - i]->get_const_col(expr_ctx, &const_col_wrapper)); if (const auto* const_column = check_and_get_column( - const_col_wrapper->column_ptr)) { + const_col_wrapper->column_ptr.get())) { *slot_ref_child = i; *constant_val = const_column->get_data_at(0); } else { diff --git a/be/src/vec/aggregate_functions/aggregate_function_window.h b/be/src/vec/aggregate_functions/aggregate_function_window.h index 0cef4c82d3dbfe..5d449318b7d2f5 100644 --- a/be/src/vec/aggregate_functions/aggregate_function_window.h +++ b/be/src/vec/aggregate_functions/aggregate_function_window.h @@ -402,7 +402,7 @@ struct LeadLagData { if (nullable_column->is_null_at(0)) { _default_value.reset(); } else { - _default_value.set_value(nullable_column->get_nested_column_ptr(), 0); + _default_value.set_value(nullable_column->get_nested_column_ptr().get(), 0); } } else { _default_value.set_value(column, 0); diff --git a/be/src/vec/columns/column_object.cpp b/be/src/vec/columns/column_object.cpp index 3d6a3e44436d29..4300725cacaf9c 100644 --- a/be/src/vec/columns/column_object.cpp +++ b/be/src/vec/columns/column_object.cpp @@ -1484,7 +1484,7 @@ Status ColumnObject::serialize_one_row_to_json_format(size_t row, rapidjson::Str #endif for (const auto& subcolumn : subcolumns) { RETURN_IF_ERROR(find_and_set_leave_value( - subcolumn->data.get_finalized_column_ptr(), subcolumn->path, + subcolumn->data.get_finalized_column_ptr().get(), subcolumn->path, subcolumn->data.get_least_common_type_serde(), subcolumn->data.get_least_common_type(), 
subcolumn->data.least_common_type.get_base_type_id(), root, @@ -1558,7 +1558,7 @@ Status ColumnObject::merge_sparse_to_root_column() { continue; } bool succ = find_and_set_leave_value( - column, subcolumn->path, subcolumn->data.get_least_common_type_serde(), + column.get(), subcolumn->path, subcolumn->data.get_least_common_type_serde(), subcolumn->data.get_least_common_type(), subcolumn->data.least_common_type.get_base_type_id(), root, doc_structure->GetAllocator(), mem_pool, i); @@ -1705,7 +1705,7 @@ bool ColumnObject::empty() const { } ColumnPtr get_base_column_of_array(const ColumnPtr& column) { - if (const auto* column_array = check_and_get_column(column)) { + if (const auto* column_array = check_and_get_column(column.get())) { return column_array->get_data_ptr(); } return column; diff --git a/be/src/vec/common/cow.h b/be/src/vec/common/cow.h index 95df7694f227d9..4970f649d32e85 100644 --- a/be/src/vec/common/cow.h +++ b/be/src/vec/common/cow.h @@ -203,8 +203,6 @@ class COW { operator bool() const { return t != nullptr; } - operator T*() const { return t; } - private: T* t = nullptr; }; @@ -346,8 +344,8 @@ class COW { operator const immutable_ptr&() const { return value; } operator immutable_ptr&() { return value; } - operator bool() const { return value != nullptr; } - bool operator!() const { return value == nullptr; } + operator bool() const { return value.get() != nullptr; } + bool operator!() const { return value.get() == nullptr; } bool operator==(const chameleon_ptr& rhs) const { return value == rhs.value; } bool operator!=(const chameleon_ptr& rhs) const { return value != rhs.value; } diff --git a/be/src/vec/exec/format/column_type_convert.cpp b/be/src/vec/exec/format/column_type_convert.cpp index a2c226c91d6799..0442158b690c39 100644 --- a/be/src/vec/exec/format/column_type_convert.cpp +++ b/be/src/vec/exec/format/column_type_convert.cpp @@ -99,7 +99,7 @@ ColumnPtr ColumnTypeConverter::get_column(const TypeDescriptor& src_type, Column return 
dst_column; } - if (_cached_src_column == nullptr) { + if (!_cached_src_column) { _cached_src_type = DataTypeFactory::instance().create_data_type(src_type, dst_type->is_nullable()); _cached_src_column = diff --git a/be/src/vec/exec/format/csv/csv_reader.cpp b/be/src/vec/exec/format/csv/csv_reader.cpp index b27bb050dc6e0c..d4a2dcfc7f3503 100644 --- a/be/src/vec/exec/format/csv/csv_reader.cpp +++ b/be/src/vec/exec/format/csv/csv_reader.cpp @@ -657,7 +657,7 @@ Status CsvReader::_fill_dest_columns(const Slice& line, Block* block, col_idx < _split_values.size() ? _split_values[col_idx] : _s_null_slice; Slice slice {value.data, value.size}; - IColumn* col_ptr = columns[i]; + IColumn* col_ptr = columns[i].get(); if (!_is_load) { col_ptr = const_cast( block->get_by_position(_file_slot_idx_map[i]).column.get()); @@ -700,7 +700,7 @@ Status CsvReader::_fill_dest_columns(const Slice& line, Block* block, Status CsvReader::_fill_empty_line(Block* block, std::vector& columns, size_t* rows) { for (int i = 0; i < _file_slot_descs.size(); ++i) { - IColumn* col_ptr = columns[i]; + IColumn* col_ptr = columns[i].get(); if (!_is_load) { col_ptr = const_cast( block->get_by_position(_file_slot_idx_map[i]).column.get()); diff --git a/be/src/vec/exec/format/json/new_json_reader.cpp b/be/src/vec/exec/format/json/new_json_reader.cpp index d79e86520741cd..adb22d588f53d3 100644 --- a/be/src/vec/exec/format/json/new_json_reader.cpp +++ b/be/src/vec/exec/format/json/new_json_reader.cpp @@ -886,7 +886,7 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator if (column_ptr->is_nullable()) { nullable_column = reinterpret_cast(column_ptr); - data_column_ptr = nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; if (value_is_null) { @@ -1010,7 +1010,8 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator const auto& sub_col_type = 
type_desc.children[sub_col_idx]; RETURN_IF_ERROR(_write_data_to_column( - sub_value, sub_col_type, struct_column_ptr->get_column(sub_col_idx).get_ptr(), + sub_value, sub_col_type, + struct_column_ptr->get_column(sub_col_idx).get_ptr().get(), column_name + "." + type_desc.field_names[sub_col_idx], sub_serdes[sub_col_idx], valid)); } @@ -1026,12 +1027,12 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator for (const auto& member_value : object_value) { RETURN_IF_ERROR(_write_data_to_column( &member_value.name, type_desc.children[0], - map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr().get(), column_name + ".key", sub_serdes[0], valid)); RETURN_IF_ERROR(_write_data_to_column( &member_value.value, type_desc.children[1], - map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr().get(), column_name + ".value", sub_serdes[1], valid)); } @@ -1048,7 +1049,7 @@ Status NewJsonReader::_write_data_to_column(rapidjson::Value::ConstValueIterator for (const auto& sub_value : array_value) { RETURN_IF_ERROR(_write_data_to_column(&sub_value, type_desc.children[0], - array_column_ptr->get_data().get_ptr(), + array_column_ptr->get_data().get_ptr().get(), column_name + ".element", sub_serdes[0], valid)); } auto& offsets = array_column_ptr->get_offsets(); @@ -1653,7 +1654,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& if (column_ptr->is_nullable()) { nullable_column = reinterpret_cast(column_ptr); - data_column_ptr = nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; // kNullType will put 1 into the Null map, so there is no need to push 0 for kNullType. 
@@ -1727,7 +1728,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& const auto& sub_col_type = type_desc.children[sub_column_idx]; RETURN_IF_ERROR(_simdjson_write_data_to_column( - sub.value(), sub_col_type, sub_column_ptr, column_name + "." + sub_key, + sub.value(), sub_col_type, sub_column_ptr.get(), column_name + "." + sub_key, sub_serdes[sub_column_idx], valid)); } @@ -1768,7 +1769,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& auto nullable_column = static_cast(column_ptr); nullable_column->get_null_map_data().push_back(0); - data_column_ptr = nullable_column->get_nested_column().get_ptr(); + data_column_ptr = nullable_column->get_nested_column().get_ptr().get(); data_serde = serde->get_nested_serdes()[0]; } Slice slice(key_view.data(), key_view.length()); @@ -1779,13 +1780,13 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& }; RETURN_IF_ERROR(f(member_value.unescaped_key(), type_desc.children[0], - map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_keys_ptr()->assume_mutable()->get_ptr().get(), sub_serdes[0], _serde_options, valid)); simdjson::ondemand::value field_value = member_value.value(); RETURN_IF_ERROR(_simdjson_write_data_to_column( field_value, type_desc.children[1], - map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr(), + map_column_ptr->get_values_ptr()->assume_mutable()->get_ptr().get(), column_name + ".value", sub_serdes[1], valid)); field_count++; } @@ -1807,7 +1808,7 @@ Status NewJsonReader::_simdjson_write_data_to_column(simdjson::ondemand::value& int field_count = 0; for (simdjson::ondemand::value sub_value : array_value) { RETURN_IF_ERROR(_simdjson_write_data_to_column( - sub_value, type_desc.children[0], array_column_ptr->get_data().get_ptr(), + sub_value, type_desc.children[0], array_column_ptr->get_data().get_ptr().get(), column_name + ".element", sub_serdes[0], valid)); field_count++; } diff 
--git a/be/src/vec/exec/format/parquet/parquet_column_convert.cpp b/be/src/vec/exec/format/parquet/parquet_column_convert.cpp index 0a5ef2913dd940..49636d809aa0d8 100644 --- a/be/src/vec/exec/format/parquet/parquet_column_convert.cpp +++ b/be/src/vec/exec/format/parquet/parquet_column_convert.cpp @@ -79,7 +79,7 @@ ColumnPtr PhysicalToLogicalConverter::get_physical_column(tparquet::Type::type s return dst_logical_column; } - if (_cached_src_physical_column == nullptr) { + if (!_cached_src_physical_column) { switch (src_physical_type) { case tparquet::Type::type::BOOLEAN: _cached_src_physical_type = std::make_shared(); diff --git a/be/src/vec/exec/format/wal/wal_reader.cpp b/be/src/vec/exec/format/wal/wal_reader.cpp index 22e6928216e1e8..a9a209b95a4ce1 100644 --- a/be/src/vec/exec/format/wal/wal_reader.cpp +++ b/be/src/vec/exec/format/wal/wal_reader.cpp @@ -92,7 +92,7 @@ Status WalReader::get_next_block(Block* block, size_t* read_rows, bool* eof) { pos, src_block.columns()); } vectorized::ColumnPtr column_ptr = src_block.get_by_position(pos).column; - if (column_ptr != nullptr && slot_desc->is_nullable()) { + if (!column_ptr && slot_desc->is_nullable()) { column_ptr = make_nullable(column_ptr); } dst_block.insert(index, vectorized::ColumnWithTypeAndName( diff --git a/be/src/vec/exec/jni_connector.cpp b/be/src/vec/exec/jni_connector.cpp index 11a58e81c98d89..4b5bb72e57bfbd 100644 --- a/be/src/vec/exec/jni_connector.cpp +++ b/be/src/vec/exec/jni_connector.cpp @@ -241,7 +241,7 @@ Status JniConnector::fill_block(Block* block, const ColumnNumbers& arguments, lo TableMetaAddress table_meta(table_address); long num_rows = table_meta.next_meta_as_long(); for (size_t i : arguments) { - if (block->get_by_position(i).column == nullptr) { + if (block->get_by_position(i).column.get() == nullptr) { auto return_type = block->get_data_type(i); bool result_nullable = return_type->is_nullable(); ColumnUInt8::MutablePtr null_col = nullptr; diff --git 
a/be/src/vec/exec/scan/vfile_scanner.cpp b/be/src/vec/exec/scan/vfile_scanner.cpp index 93a22d1a94bf52..15b681f597975e 100644 --- a/be/src/vec/exec/scan/vfile_scanner.cpp +++ b/be/src/vec/exec/scan/vfile_scanner.cpp @@ -596,7 +596,7 @@ Status VFileScanner::_convert_to_output_block(Block* block) { column_ptr = _src_block_ptr->get_by_position(result_column_id).column; // column_ptr maybe a ColumnConst, convert it to a normal column column_ptr = column_ptr->convert_to_full_column_if_const(); - DCHECK(column_ptr != nullptr); + DCHECK(column_ptr); // because of src_slot_desc is always be nullable, so the column_ptr after do dest_expr // is likely to be nullable diff --git a/be/src/vec/exec/scan/vmeta_scanner.cpp b/be/src/vec/exec/scan/vmeta_scanner.cpp index 289930b16bce85..db0256728741c7 100644 --- a/be/src/vec/exec/scan/vmeta_scanner.cpp +++ b/be/src/vec/exec/scan/vmeta_scanner.cpp @@ -148,7 +148,7 @@ Status VMetaScanner::_fill_block_with_remote_data(const std::vectoris_nullable()) { auto& null_col = reinterpret_cast(*col_ptr); null_col.get_null_map_data().push_back(0); - col_ptr = null_col.get_nested_column_ptr(); + col_ptr = null_col.get_nested_column_ptr().get(); } switch (slot_desc->type().type) { case TYPE_BOOLEAN: { diff --git a/be/src/vec/exprs/vcompound_pred.h b/be/src/vec/exprs/vcompound_pred.h index ff7649600b4c7f..e3c02f554b3d36 100644 --- a/be/src/vec/exprs/vcompound_pred.h +++ b/be/src/vec/exprs/vcompound_pred.h @@ -272,8 +272,10 @@ class VCompoundPred : public VectorizedFnCall { auto col_res = ColumnUInt8::create(size); auto col_nulls = ColumnUInt8::create(size); - auto* __restrict res_datas = assert_cast(col_res)->get_data().data(); - auto* __restrict res_nulls = assert_cast(col_nulls)->get_data().data(); + auto* __restrict res_datas = + assert_cast(col_res.get())->get_data().data(); + auto* __restrict res_nulls = + assert_cast(col_nulls.get())->get_data().data(); ColumnPtr temp_null_map = nullptr; // maybe both children are nullable / or one of 
children is nullable auto* __restrict lhs_null_map_tmp = create_null_map_column(temp_null_map, lhs_null_map); diff --git a/be/src/vec/functions/array/function_array_cum_sum.cpp b/be/src/vec/functions/array/function_array_cum_sum.cpp index 2f93a2a83b1a89..5fba7d4a619bd5 100644 --- a/be/src/vec/functions/array/function_array_cum_sum.cpp +++ b/be/src/vec/functions/array/function_array_cum_sum.cpp @@ -118,7 +118,7 @@ class FunctionArrayCumSum : public IFunction { // get null map const ColumnNullable* src_nested_nullable_col = check_and_get_column(*src_nested_column); - src_nested_column = src_nested_nullable_col->get_nested_column_ptr(); + src_nested_column = src_nested_nullable_col->get_nested_column_ptr().get(); const NullMapType& src_null_map = src_nested_nullable_col->get_null_map_column().get_data(); ColumnPtr res_nested_ptr; diff --git a/be/src/vec/functions/array/function_array_distinct.h b/be/src/vec/functions/array/function_array_distinct.h index 4b7e3e6f035d48..4d37f7cbcf7133 100644 --- a/be/src/vec/functions/array/function_array_distinct.h +++ b/be/src/vec/functions/array/function_array_distinct.h @@ -102,14 +102,14 @@ class FunctionArrayDistinct : public IFunction { if (src_nested_column->is_nullable()) { const auto* src_nested_nullable_col = check_and_get_column(*src_nested_column); - src_nested_column = src_nested_nullable_col->get_nested_column_ptr(); + src_nested_column = src_nested_nullable_col->get_nested_column_ptr().get(); src_null_map = &src_nested_nullable_col->get_null_map_column().get_data(); } NullMapType* dest_null_map = nullptr; if (dest_nested_column->is_nullable()) { auto* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); dest_null_map = &dest_nested_nullable_col->get_null_map_column().get_data(); } diff --git a/be/src/vec/functions/array/function_array_enumerate.cpp 
b/be/src/vec/functions/array/function_array_enumerate.cpp index 0e8bca3e5cd3b1..3846addb83bb55 100644 --- a/be/src/vec/functions/array/function_array_enumerate.cpp +++ b/be/src/vec/functions/array/function_array_enumerate.cpp @@ -83,7 +83,7 @@ class FunctionArrayEnumerate : public IFunction { auto left_column = block.get_by_position(arguments[0]).column->convert_to_full_column_if_const(); const ColumnArray* array = - check_and_get_column(remove_nullable(left_column->get_ptr())); + check_and_get_column(remove_nullable(left_column->get_ptr()).get()); if (!array) { return Status::RuntimeError( fmt::format("Illegal column {}, of first argument of function {}", @@ -107,7 +107,8 @@ class FunctionArrayEnumerate : public IFunction { ColumnPtr res_column = ColumnArray::create(std::move(nested_column), array->get_offsets_ptr()); if (block.get_by_position(arguments[0]).column->is_nullable()) { - const ColumnNullable* nullable = check_and_get_column(left_column); + const ColumnNullable* nullable = + check_and_get_column(left_column.get()); res_column = ColumnNullable::create( res_column, nullable->get_null_map_column().clone_resized(nullable->size())); } diff --git a/be/src/vec/functions/array/function_array_enumerate_uniq.cpp b/be/src/vec/functions/array/function_array_enumerate_uniq.cpp index 21d6ab40007b6e..bdee406655f196 100644 --- a/be/src/vec/functions/array/function_array_enumerate_uniq.cpp +++ b/be/src/vec/functions/array/function_array_enumerate_uniq.cpp @@ -128,7 +128,7 @@ class FunctionArrayEnumerateUniq : public IFunction { block.get_by_position(arguments[i]).column->convert_to_full_column_if_const()); ColumnPtr& cur_column = src_columns[i]; const ColumnArray* array = - check_and_get_column(remove_nullable(cur_column->get_ptr())); + check_and_get_column(remove_nullable(cur_column->get_ptr()).get()); if (!array) { return Status::RuntimeError( fmt::format("Illegal column {}, of first argument of function {}", @@ -151,7 +151,7 @@ class FunctionArrayEnumerateUniq : 
public IFunction { const NullMapType* null_map = nullptr; if (arguments.size() == 1 && data_columns[0]->is_nullable()) { const ColumnNullable* nullable = check_and_get_column(*data_columns[0]); - data_columns[0] = nullable->get_nested_column_ptr(); + data_columns[0] = nullable->get_nested_column_ptr().get(); null_map = &nullable->get_null_map_column().get_data(); } @@ -219,7 +219,8 @@ class FunctionArrayEnumerateUniq : public IFunction { if (arguments.size() == 1 && block.get_by_position(arguments[0]).column->is_nullable()) { auto left_column = block.get_by_position(arguments[0]).column->convert_to_full_column_if_const(); - const ColumnNullable* nullable = check_and_get_column(left_column); + const ColumnNullable* nullable = + check_and_get_column(left_column.get()); res_column = ColumnNullable::create( res_column, nullable->get_null_map_column().clone_resized(nullable->size())); } diff --git a/be/src/vec/functions/array/function_array_join.h b/be/src/vec/functions/array/function_array_join.h index 957b2288fb746a..29521c36111824 100644 --- a/be/src/vec/functions/array/function_array_join.h +++ b/be/src/vec/functions/array/function_array_join.h @@ -78,10 +78,11 @@ struct ArrayJoinImpl { auto nested_type = data_type_array->get_nested_type(); auto dest_column_ptr = ColumnString::create(); - DCHECK(dest_column_ptr != nullptr); + DCHECK(dest_column_ptr); - auto res_val = _execute_by_type(*src.nested_col, *src.offsets_ptr, src.nested_nullmap_data, - sep_str, null_replace_str, nested_type, dest_column_ptr); + auto res_val = + _execute_by_type(*src.nested_col, *src.offsets_ptr, src.nested_nullmap_data, + sep_str, null_replace_str, nested_type, dest_column_ptr.get()); if (!res_val) { return Status::RuntimeError(fmt::format( "execute failed or unsupported types for function {}({},{},{})", "array_join", diff --git a/be/src/vec/functions/array/function_array_map.h b/be/src/vec/functions/array/function_array_map.h index fd4a2fc59f3548..5bfe723e232884 100644 --- 
a/be/src/vec/functions/array/function_array_map.h +++ b/be/src/vec/functions/array/function_array_map.h @@ -165,7 +165,7 @@ struct ArrayMapImpl { static Status execute(ColumnPtr& res_ptr, ColumnArrayExecutionDatas datas, std::vector& col_const, size_t start_row, size_t end_row) { ColumnArrayMutableData dst = - create_mutable_data(datas[0].nested_col, datas[0].nested_nullmap_data); + create_mutable_data(datas[0].nested_col.get(), datas[0].nested_nullmap_data); if (_execute_internal(dst, datas, col_const, start_row, end_row)) { res_ptr = assemble_column_array(dst); return Status::OK(); diff --git a/be/src/vec/functions/array/function_array_pop.cpp b/be/src/vec/functions/array/function_array_pop.cpp index 2182699e0205b5..1ddd767cfaf3ce 100644 --- a/be/src/vec/functions/array/function_array_pop.cpp +++ b/be/src/vec/functions/array/function_array_pop.cpp @@ -75,7 +75,7 @@ class FunctionArrayPop : public IFunction { } // prepare dst array column bool is_nullable = src.nested_nullmap_data != nullptr; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); // start from index depending on the PopType::start_offset auto offset_column = ColumnInt64::create(array_column->size(), PopType::start_offset); diff --git a/be/src/vec/functions/array/function_array_range.cpp b/be/src/vec/functions/array/function_array_range.cpp index 8a3de3754503ae..0980587660b20a 100644 --- a/be/src/vec/functions/array/function_array_range.cpp +++ b/be/src/vec/functions/array/function_array_range.cpp @@ -137,7 +137,7 @@ struct RangeImplUtil { IColumn* dest_nested_column = &dest_array_column_ptr->get_data(); ColumnNullable* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); auto& 
dest_nested_null_map = dest_nested_nullable_col->get_null_map_column().get_data(); auto args_null_map = ColumnUInt8::create(input_rows_count, 0); diff --git a/be/src/vec/functions/array/function_array_remove.h b/be/src/vec/functions/array/function_array_remove.h index 197b032b0f8a4b..661a18170ed9dc 100644 --- a/be/src/vec/functions/array/function_array_remove.h +++ b/be/src/vec/functions/array/function_array_remove.h @@ -107,13 +107,13 @@ class FunctionArrayRemove : public IFunction { auto dst_nested_column = ColumnNullable::create(nested_column.clone_empty(), ColumnUInt8::create()); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column->get_nested_column_ptr(); + dst_column = dst_nested_column->get_nested_column_ptr().get(); dst_null_map = &dst_nested_column->get_null_map_data(); dst_null_map->reserve(offsets.back()); } else { auto dst_nested_column = nested_column.clone_empty(); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column; + dst_column = dst_nested_column.get(); } auto& dst_data = reinterpret_cast(*dst_column).get_data(); @@ -179,13 +179,13 @@ class FunctionArrayRemove : public IFunction { auto dst_nested_column = ColumnNullable::create(nested_column.clone_empty(), ColumnUInt8::create()); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column->get_nested_column_ptr(); + dst_column = dst_nested_column->get_nested_column_ptr().get(); dst_null_map = &dst_nested_column->get_null_map_data(); dst_null_map->reserve(offsets.back()); } else { auto dst_nested_column = nested_column.clone_empty(); array_nested_column = dst_nested_column->get_ptr(); - dst_column = dst_nested_column; + dst_column = dst_nested_column.get(); } auto& dst_offs = reinterpret_cast(*dst_column).get_offsets(); diff --git a/be/src/vec/functions/array/function_array_reverse.h b/be/src/vec/functions/array/function_array_reverse.h index 8567bc61158bab..9fc1623151801e 100644 --- 
a/be/src/vec/functions/array/function_array_reverse.h +++ b/be/src/vec/functions/array/function_array_reverse.h @@ -40,7 +40,7 @@ struct ArrayReverseImpl { } bool is_nullable = src.nested_nullmap_data ? true : false; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); auto res_val = _execute_internal(*src.nested_col, *src.offsets_ptr, *dst.nested_col, diff --git a/be/src/vec/functions/array/function_array_set.h b/be/src/vec/functions/array/function_array_set.h index 1ecf6d72531c73..975268b1e61553 100644 --- a/be/src/vec/functions/array/function_array_set.h +++ b/be/src/vec/functions/array/function_array_set.h @@ -142,9 +142,9 @@ struct ArraySetImpl { bool right_const) { ColumnArrayMutableData dst; if (left_data.nested_nullmap_data || right_data.nested_nullmap_data) { - dst = create_mutable_data(left_data.nested_col, true); + dst = create_mutable_data(left_data.nested_col.get(), true); } else { - dst = create_mutable_data(left_data.nested_col, false); + dst = create_mutable_data(left_data.nested_col.get(), false); } ColumnPtr res_column; if (left_const) { diff --git a/be/src/vec/functions/array/function_array_slice.h b/be/src/vec/functions/array/function_array_slice.h index 2acd1d3fbe1fd4..76082b266026ea 100644 --- a/be/src/vec/functions/array/function_array_slice.h +++ b/be/src/vec/functions/array/function_array_slice.h @@ -89,7 +89,7 @@ class FunctionArraySlice : public IFunction { } // prepare dst array column bool is_nullable = src.nested_nullmap_data ? 
true : false; - ColumnArrayMutableData dst = create_mutable_data(src.nested_col, is_nullable); + ColumnArrayMutableData dst = create_mutable_data(src.nested_col.get(), is_nullable); dst.offsets_ptr->reserve(input_rows_count); // execute slice_array(dst, src, *offset_column, length_column.get()); diff --git a/be/src/vec/functions/array/function_array_sortby.cpp b/be/src/vec/functions/array/function_array_sortby.cpp index 899bb40fba1423..fe6799aaa2e876 100644 --- a/be/src/vec/functions/array/function_array_sortby.cpp +++ b/be/src/vec/functions/array/function_array_sortby.cpp @@ -95,13 +95,13 @@ class FunctionArraySortBy : public IFunction { src_column_array.get_offsets_column().clone_resized(input_rows_count); MutableColumnPtr result_nullmap = nullptr; const ColumnUInt8::Container* src_null_map_data = nullptr; - if (argument_nullmap[0] != nullptr) { + if (argument_nullmap[0]) { const auto& src_column_nullmap = assert_cast(*argument_nullmap[0]); result_nullmap = src_column_nullmap.clone_resized(input_rows_count); src_null_map_data = &(src_column_nullmap.get_data()); } const ColumnUInt8::Container* key_null_map_data = nullptr; - if (argument_nullmap[1] != nullptr) { + if (argument_nullmap[1]) { const auto& key_column_nullmap = assert_cast(*argument_nullmap[1]); key_null_map_data = &(key_column_nullmap.get_data()); } @@ -149,7 +149,7 @@ class FunctionArraySortBy : public IFunction { } } src_nested_nullable_column.append_data_by_selector(result_data_column, src_selector); - if (result_nullmap != nullptr) { + if (result_nullmap) { block.replace_by_position( result, ColumnNullable::create(ColumnArray::create(std::move(result_data_column), diff --git a/be/src/vec/functions/array/function_arrays_overlap.h b/be/src/vec/functions/array/function_arrays_overlap.h index dd993100885e3a..8ac21bcd710f8d 100644 --- a/be/src/vec/functions/array/function_arrays_overlap.h +++ b/be/src/vec/functions/array/function_arrays_overlap.h @@ -370,11 +370,11 @@ class FunctionArraysOverlap : 
public IFunction { ExecutorImpl impl; if (right_size < left_size) { - impl.insert_array(right_data.nested_col, right_start, right_size); - dst_data[row] = impl.find_any(left_data.nested_col, left_start, left_size); + impl.insert_array(right_data.nested_col.get(), right_start, right_size); + dst_data[row] = impl.find_any(left_data.nested_col.get(), left_start, left_size); } else { - impl.insert_array(left_data.nested_col, left_start, left_size); - dst_data[row] = impl.find_any(right_data.nested_col, right_start, right_size); + impl.insert_array(left_data.nested_col.get(), left_start, left_size); + dst_data[row] = impl.find_any(right_data.nested_col.get(), right_start, right_size); } } return Status::OK(); diff --git a/be/src/vec/functions/comparison_equal_for_null.cpp b/be/src/vec/functions/comparison_equal_for_null.cpp index 919f9ebed65a7c..35719cf573008a 100644 --- a/be/src/vec/functions/comparison_equal_for_null.cpp +++ b/be/src/vec/functions/comparison_equal_for_null.cpp @@ -139,18 +139,20 @@ class FunctionEqForNull : public IFunction { left_column = check_and_get_column( assert_cast( col_left.column.get()) - ->get_data_column_ptr()); + ->get_data_column_ptr() + .get()); } else { - left_column = check_and_get_column(col_left.column); + left_column = check_and_get_column(col_left.column.get()); } if (right_const) { right_column = check_and_get_column( assert_cast( col_right.column.get()) - ->get_data_column_ptr()); + ->get_data_column_ptr() + .get()); } else { - right_column = check_and_get_column(col_right.column); + right_column = check_and_get_column(col_right.column.get()); } bool left_nullable = left_column != nullptr; diff --git a/be/src/vec/functions/function_agg_state.h b/be/src/vec/functions/function_agg_state.h index f4b7aef23af220..84a8d4f6f8b055 100644 --- a/be/src/vec/functions/function_agg_state.h +++ b/be/src/vec/functions/function_agg_state.h @@ -82,7 +82,7 @@ class FunctionAggState : public IFunction { save_columns.push_back(column); } - 
agg_columns.push_back(column); + agg_columns.push_back(column.get()); } _agg_function->streaming_agg_serialize_to_column(agg_columns.data(), col, input_rows_count, &(context->get_arena())); diff --git a/be/src/vec/functions/function_binary_arithmetic.h b/be/src/vec/functions/function_binary_arithmetic.h index 4c0b8e7a0890dc..a2757b38346247 100644 --- a/be/src/vec/functions/function_binary_arithmetic.h +++ b/be/src/vec/functions/function_binary_arithmetic.h @@ -165,7 +165,7 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_vector_constant(ColumnPtr column_left, B b) { auto column_left_ptr = - check_and_get_column(column_left); + check_and_get_column(column_left.get()); auto column_result = Base::ColumnVectorResult::create(column_left->size()); DCHECK(column_left_ptr != nullptr); @@ -182,7 +182,7 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_constant_vector(A a, ColumnPtr column_right) { auto column_right_ptr = - check_and_get_column(column_right); + check_and_get_column(column_right.get()); auto column_result = Base::ColumnVectorResult::create(column_right->size()); DCHECK(column_right_ptr != nullptr); @@ -199,9 +199,9 @@ struct BinaryOperationImpl { static ColumnPtr adapt_normal_vector_vector(ColumnPtr column_left, ColumnPtr column_right) { auto column_left_ptr = - check_and_get_column(column_left); + check_and_get_column(column_left.get()); auto column_right_ptr = - check_and_get_column(column_right); + check_and_get_column(column_right.get()); auto column_result = Base::ColumnVectorResult::create(column_left->size()); DCHECK(column_left_ptr != nullptr && column_right_ptr != nullptr); @@ -447,7 +447,8 @@ struct DecimalBinaryOperation { auto type_result = assert_cast&, TypeCheckOnRelease::DISABLE>( *res_data_type); - auto column_left_ptr = check_and_get_column(column_left); + auto column_left_ptr = + check_and_get_column(column_left.get()); auto column_result = ColumnDecimal::create( column_left->size(), assert_cast&, 
TypeCheckOnRelease::DISABLE>( @@ -482,7 +483,8 @@ struct DecimalBinaryOperation { auto type_result = assert_cast&, TypeCheckOnRelease::DISABLE>( *res_data_type); - auto column_right_ptr = check_and_get_column(column_right); + auto column_right_ptr = + check_and_get_column(column_right.get()); auto column_result = ColumnDecimal::create( column_right->size(), assert_cast&, TypeCheckOnRelease::DISABLE>( @@ -515,8 +517,10 @@ struct DecimalBinaryOperation { const ResultType& max_result_number, const ResultType& scale_diff_multiplier, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); - auto column_right_ptr = check_and_get_column(column_right); + auto column_left_ptr = + check_and_get_column(column_left.get()); + auto column_right_ptr = + check_and_get_column(column_right.get()); const auto& type_result = assert_cast&>(*res_data_type); auto column_result = @@ -847,8 +851,8 @@ struct ConstOrVectorAdapter { static ColumnPtr constant_constant(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); - auto column_right_ptr = check_and_get_column(column_right); + const auto* column_left_ptr = check_and_get_column(column_left.get()); + const auto* column_right_ptr = check_and_get_column(column_right.get()); DCHECK(column_left_ptr != nullptr && column_right_ptr != nullptr); ColumnPtr column_result = nullptr; @@ -875,7 +879,7 @@ struct ConstOrVectorAdapter { static ColumnPtr vector_constant(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_right_ptr = check_and_get_column(column_right); + const auto* column_right_ptr = check_and_get_column(column_right.get()); DCHECK(column_right_ptr != nullptr); if constexpr (result_is_decimal) { @@ -894,7 +898,7 @@ struct ConstOrVectorAdapter { static ColumnPtr 
constant_vector(ColumnPtr column_left, ColumnPtr column_right, const LeftDataType& type_left, const RightDataType& type_right, DataTypePtr res_data_type) { - auto column_left_ptr = check_and_get_column(column_left); + const auto* column_left_ptr = check_and_get_column(column_left.get()); DCHECK(column_left_ptr != nullptr); if constexpr (result_is_decimal) { diff --git a/be/src/vec/functions/function_bitmap.cpp b/be/src/vec/functions/function_bitmap.cpp index 92a5dba7b7a4d4..96cae50a9baf9a 100644 --- a/be/src/vec/functions/function_bitmap.cpp +++ b/be/src/vec/functions/function_bitmap.cpp @@ -1211,7 +1211,7 @@ class FunctionBitmapToArray : public IFunction { IColumn* dest_nested_column = &dest_array_column_ptr->get_data(); ColumnNullable* dest_nested_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nested_nullable_col->get_nested_column_ptr().get(); auto& dest_nested_null_map = dest_nested_nullable_col->get_null_map_column().get_data(); auto& arg_col = block.get_by_position(arguments[0]).column; diff --git a/be/src/vec/functions/function_bitmap_variadic.cpp b/be/src/vec/functions/function_bitmap_variadic.cpp index 6e1a103fdbd83b..47a159e3c2f391 100644 --- a/be/src/vec/functions/function_bitmap_variadic.cpp +++ b/be/src/vec/functions/function_bitmap_variadic.cpp @@ -247,7 +247,7 @@ class FunctionBitMapVariadic : public IFunction { vec_res.resize(input_rows_count); RETURN_IF_ERROR(Impl::vector_vector(argument_columns.data(), argument_size, - input_rows_count, vec_res, col_res_nulls)); + input_rows_count, vec_res, col_res_nulls.get())); if (!use_default_implementation_for_nulls() && result_info.type->is_nullable()) { block.replace_by_position( result, ColumnNullable::create(std::move(col_res), std::move(col_res_nulls))); diff --git a/be/src/vec/functions/function_case.h b/be/src/vec/functions/function_case.h index af44ea0d9b1ace..81f08f682ef0ef 100644 --- 
a/be/src/vec/functions/function_case.h +++ b/be/src/vec/functions/function_case.h @@ -318,7 +318,7 @@ class FunctionCase : public IFunction { const uint8* __restrict then_idx, CaseWhenColumnHolder& column_holder) const { for (auto& then_ptr : column_holder.then_ptrs) { - then_ptr->reset(then_ptr.value()->convert_to_full_column_if_const()); + then_ptr->reset(then_ptr.value()->convert_to_full_column_if_const().get()); } size_t rows_count = column_holder.rows_count; diff --git a/be/src/vec/functions/function_cast.h b/be/src/vec/functions/function_cast.h index af9e9d19267073..483e837de5dfd8 100644 --- a/be/src/vec/functions/function_cast.h +++ b/be/src/vec/functions/function_cast.h @@ -770,7 +770,7 @@ struct ConvertImplGenericFromJsonb { continue; } ReadBuffer read_buffer((char*)(input_str.data()), input_str.size()); - Status st = data_type_to->from_string(read_buffer, col_to); + Status st = data_type_to->from_string(read_buffer, col_to.get()); // if parsing failed, will return null (*vec_null_map_to)[i] = !st.ok(); if (!st.ok()) { diff --git a/be/src/vec/functions/function_collection_in.h b/be/src/vec/functions/function_collection_in.h index ce58d63f44b655..35299c7ea672be 100644 --- a/be/src/vec/functions/function_collection_in.h +++ b/be/src/vec/functions/function_collection_in.h @@ -117,7 +117,8 @@ class FunctionCollectionIn : public IFunction { DCHECK(const_column_ptr != nullptr); const auto& [col, _] = unpack_if_const(const_column_ptr->column_ptr); if (col->is_nullable()) { - auto* null_col = vectorized::check_and_get_column(col); + const auto* null_col = + vectorized::check_and_get_column(col.get()); if (null_col->has_null()) { state->null_in_set = true; } else { @@ -161,7 +162,7 @@ class FunctionCollectionIn : public IFunction { if (materialized_column_not_null->is_nullable()) { materialized_column_not_null = assert_cast( vectorized::check_and_get_column( - materialized_column_not_null) + materialized_column_not_null.get()) ->get_nested_column_ptr()); } diff 
--git a/be/src/vec/functions/function_date_or_datetime_computation.h b/be/src/vec/functions/function_date_or_datetime_computation.h index 330ea75cba96c8..224bf49179177c 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation.h +++ b/be/src/vec/functions/function_date_or_datetime_computation.h @@ -878,7 +878,7 @@ struct CurrentDateTimeImpl { bool use_const; if constexpr (WithPrecision) { if (const auto* const_column = check_and_get_column( - block.get_by_position(arguments[0]).column)) { + block.get_by_position(arguments[0]).column.get())) { int64_t scale = const_column->get_int(0); dtv.from_unixtime(context->state()->timestamp_ms() / 1000, context->state()->nano_seconds(), @@ -892,7 +892,7 @@ struct CurrentDateTimeImpl { use_const = true; } else if (const auto* nullable_column = check_and_get_column( - block.get_by_position(arguments[0]).column)) { + block.get_by_position(arguments[0]).column.get())) { const auto& null_map = nullable_column->get_null_map_data(); const auto& nested_column = assert_cast( nullable_column->get_nested_column_ptr().get()); diff --git a/be/src/vec/functions/function_ip.h b/be/src/vec/functions/function_ip.h index 1a1c23e2b06c35..9f2f4dc28868b4 100644 --- a/be/src/vec/functions/function_ip.h +++ b/be/src/vec/functions/function_ip.h @@ -768,11 +768,13 @@ class FunctionIsIPAddressInRange : public IFunction { if (is_ipv4(addr_column_with_type_and_name.type)) { execute_impl_with_ip( input_rows_count, addr_const, cidr_const, - assert_cast(cidr_column.get()), addr_column, col_res); + assert_cast(cidr_column.get()), addr_column, + col_res.get()); } else if (is_ipv6(addr_column_with_type_and_name.type)) { execute_impl_with_ip( input_rows_count, addr_const, cidr_const, - assert_cast(cidr_column.get()), addr_column, col_res); + assert_cast(cidr_column.get()), addr_column, + col_res.get()); } else { const auto* str_addr_column = assert_cast(addr_column.get()); const auto* str_cidr_column = assert_cast(cidr_column.get()); diff --git 
a/be/src/vec/functions/function_jsonb.cpp b/be/src/vec/functions/function_jsonb.cpp index 463508169aadc6..dcae26f3c2f844 100644 --- a/be/src/vec/functions/function_jsonb.cpp +++ b/be/src/vec/functions/function_jsonb.cpp @@ -459,11 +459,12 @@ class FunctionJsonbKeys : public IFunction { // prepare jsonb data column jsonb_data_column = unpack_if_const(block.get_by_position(arguments[0]).column).first; if (block.get_by_position(arguments[0]).column->is_nullable()) { - const auto* nullable = check_and_get_column(jsonb_data_column); + const auto* nullable = check_and_get_column(jsonb_data_column.get()); jsonb_data_column = nullable->get_nested_column_ptr(); data_null_map = &nullable->get_null_map_data(); } - const ColumnString* col_from_string = check_and_get_column(jsonb_data_column); + const ColumnString* col_from_string = + check_and_get_column(jsonb_data_column.get()); // prepare parse path column prepare, maybe we do not have path column ColumnPtr jsonb_path_column = nullptr; @@ -475,11 +476,12 @@ class FunctionJsonbKeys : public IFunction { std::tie(jsonb_path_column, path_const) = unpack_if_const(block.get_by_position(arguments[1]).column); if (block.get_by_position(arguments[1]).column->is_nullable()) { - const auto* nullable = check_and_get_column(jsonb_path_column); + const auto* nullable = + check_and_get_column(jsonb_path_column.get()); jsonb_path_column = nullable->get_nested_column_ptr(); path_null_map = &nullable->get_null_map_data(); } - jsonb_path_col = check_and_get_column(jsonb_path_column); + jsonb_path_col = check_and_get_column(jsonb_path_column.get()); } auto null_map = ColumnUInt8::create(input_rows_count, 0); @@ -1844,9 +1846,10 @@ class FunctionJsonSearch : public IFunction { // prepare jsonb data column std::tie(col_json, json_is_const) = unpack_if_const(block.get_by_position(arguments[0]).column); - const ColumnString* col_json_string = check_and_get_column(col_json); - if (auto* nullable = check_and_get_column(col_json)) { - col_json_string 
= check_and_get_column(nullable->get_nested_column_ptr()); + const ColumnString* col_json_string = check_and_get_column(col_json.get()); + if (auto* nullable = check_and_get_column(col_json.get())) { + col_json_string = + check_and_get_column(nullable->get_nested_column_ptr().get()); } if (!col_json_string) { @@ -1873,8 +1876,8 @@ class FunctionJsonSearch : public IFunction { // prepare jsonb data column std::tie(col_one, one_is_const) = unpack_if_const(block.get_by_position(arguments[1]).column); - const ColumnString* col_one_string = check_and_get_column(col_one); - if (auto* nullable = check_and_get_column(col_one)) { + const ColumnString* col_one_string = check_and_get_column(col_one.get()); + if (auto* nullable = check_and_get_column(col_one.get())) { col_one_string = check_and_get_column(*nullable->get_nested_column_ptr()); } if (!col_one_string) { @@ -1921,8 +1924,9 @@ class FunctionJsonSearch : public IFunction { std::tie(col_search, search_is_const) = unpack_if_const(block.get_by_position(arguments[2]).column); - const ColumnString* col_search_string = check_and_get_column(col_search); - if (auto* nullable = check_and_get_column(col_search)) { + const ColumnString* col_search_string = + check_and_get_column(col_search.get()); + if (auto* nullable = check_and_get_column(col_search.get())) { col_search_string = check_and_get_column(*nullable->get_nested_column_ptr()); } diff --git a/be/src/vec/functions/function_nullables.cpp b/be/src/vec/functions/function_nullables.cpp index 91bce24f48fc8b..b1e72ff52a71f4 100644 --- a/be/src/vec/functions/function_nullables.cpp +++ b/be/src/vec/functions/function_nullables.cpp @@ -54,7 +54,8 @@ class FunctionNullable : public IFunction { Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, uint32_t result, size_t input_rows_count) const override { ColumnPtr& col = block.get_by_position(arguments[0]).column; - if (const auto* col_null = check_and_get_column(col); col_null == nullptr) { 
+ if (const auto* col_null = check_and_get_column(col.get()); + col_null == nullptr) { // not null block.replace_by_position( result, ColumnNullable::create(col, ColumnBool::create(input_rows_count, 0))); @@ -85,7 +86,7 @@ class FunctionNonNullable : public IFunction { Status execute_impl(FunctionContext* context, Block& block, const ColumnNumbers& arguments, uint32_t result, size_t input_rows_count) const override { auto& data = block.get_by_position(arguments[0]); - if (const auto* col_null = check_and_get_column(data.column); + if (const auto* col_null = check_and_get_column(data.column.get()); col_null == nullptr) // raise error if input is not nullable. { return Status::InvalidArgument( diff --git a/be/src/vec/functions/function_quantile_state.cpp b/be/src/vec/functions/function_quantile_state.cpp index 95afbf1db32d23..8f8740841c5407 100644 --- a/be/src/vec/functions/function_quantile_state.cpp +++ b/be/src/vec/functions/function_quantile_state.cpp @@ -130,7 +130,7 @@ class FunctionToQuantileState : public IFunction { const ColumnPtr& column = block.get_by_position(arguments[0]).column; const DataTypePtr& data_type = block.get_by_position(arguments[0]).type; auto compression_arg = check_and_get_column_const( - block.get_by_position(arguments.back()).column); + block.get_by_position(arguments.back()).column.get()); float compression = 2048; if (compression_arg) { auto compression_arg_val = compression_arg->get_value(); @@ -189,7 +189,7 @@ class FunctionQuantileStatePercent : public IFunction { auto str_col = assert_cast(column.get()); auto& col_data = str_col->get_data(); auto percent_arg = check_and_get_column_const( - block.get_by_position(arguments.back()).column); + block.get_by_position(arguments.back()).column.get()); if (!percent_arg) { return Status::InternalError( diff --git a/be/src/vec/functions/function_string.h b/be/src/vec/functions/function_string.h index 14926e1062c020..a729af5948a73f 100644 --- a/be/src/vec/functions/function_string.h +++ 
b/be/src/vec/functions/function_string.h @@ -2142,7 +2142,7 @@ class FunctionSplitByString : public IFunction { NullMapType* dest_nested_null_map = nullptr; auto* dest_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nullable_col->get_nested_column_ptr().get(); dest_nested_null_map = &dest_nullable_col->get_null_map_column().get_data(); const auto* col_left = check_and_get_column(src_column.get()); @@ -4436,7 +4436,7 @@ class FunctionTranslate : public IFunction { } else if (is_ascii) { impl_vectors = impl_vectors_ascii; } - impl_vectors(col_source, col_from, col_to, col_res); + impl_vectors(col_source, col_from, col_to, col_res.get()); block.get_by_position(result).column = std::move(col_res); return Status::OK(); } diff --git a/be/src/vec/functions/function_tokenize.cpp b/be/src/vec/functions/function_tokenize.cpp index 0bcd31af40dac7..f0a7c3b68aec49 100644 --- a/be/src/vec/functions/function_tokenize.cpp +++ b/be/src/vec/functions/function_tokenize.cpp @@ -129,7 +129,7 @@ Status FunctionTokenize::execute_impl(FunctionContext* /*context*/, Block& block NullMapType* dest_nested_null_map = nullptr; ColumnNullable* dest_nullable_col = reinterpret_cast(dest_nested_column); - dest_nested_column = dest_nullable_col->get_nested_column_ptr(); + dest_nested_column = dest_nullable_col->get_nested_column_ptr().get(); dest_nested_null_map = &dest_nullable_col->get_null_map_column().get_data(); if (auto col_left = check_and_get_column(src_column.get())) { diff --git a/be/src/vec/functions/functions_geo.cpp b/be/src/vec/functions/functions_geo.cpp index 6d75258d146ff7..0a752af18fe04c 100644 --- a/be/src/vec/functions/functions_geo.cpp +++ b/be/src/vec/functions/functions_geo.cpp @@ -258,10 +258,10 @@ struct StDistanceSphere { ColumnPtr y_lat_origin = block.get_by_position(arguments[3]).column->convert_to_full_column_if_const(); - const auto* x_lng = 
check_and_get_column(x_lng_origin); - const auto* x_lat = check_and_get_column(x_lat_origin); - const auto* y_lng = check_and_get_column(y_lng_origin); - const auto* y_lat = check_and_get_column(y_lat_origin); + const auto* x_lng = check_and_get_column(x_lng_origin.get()); + const auto* x_lat = check_and_get_column(x_lat_origin.get()); + const auto* y_lng = check_and_get_column(y_lng_origin.get()); + const auto* y_lat = check_and_get_column(y_lat_origin.get()); CHECK(x_lng && x_lat && y_lng && y_lat); const auto size = x_lng->size(); @@ -305,10 +305,10 @@ struct StAngleSphere { ColumnPtr y_lat_origin = block.get_by_position(arguments[3]).column->convert_to_full_column_if_const(); - const auto* x_lng = check_and_get_column(x_lng_origin); - const auto* x_lat = check_and_get_column(x_lat_origin); - const auto* y_lng = check_and_get_column(y_lng_origin); - const auto* y_lat = check_and_get_column(y_lat_origin); + const auto* x_lng = check_and_get_column(x_lng_origin.get()); + const auto* x_lat = check_and_get_column(x_lat_origin.get()); + const auto* y_lng = check_and_get_column(y_lng_origin.get()); + const auto* y_lat = check_and_get_column(y_lat_origin.get()); CHECK(x_lng && x_lat && y_lng && y_lat); const auto size = x_lng->size(); diff --git a/be/src/vec/functions/functions_logical.cpp b/be/src/vec/functions/functions_logical.cpp index 0f474851f032ee..f99f0447725edd 100644 --- a/be/src/vec/functions/functions_logical.cpp +++ b/be/src/vec/functions/functions_logical.cpp @@ -141,11 +141,11 @@ void basic_execute_impl(ColumnRawPtrs arguments, ColumnWithTypeAndName& result_i size_t input_rows_count) { auto col_res = ColumnUInt8::create(input_rows_count); if (auto l = check_and_get_column(arguments[0])) { - vector_const(arguments[1], l, col_res, input_rows_count); + vector_const(arguments[1], l, col_res.get(), input_rows_count); } else if (auto r = check_and_get_column(arguments[1])) { - vector_const(arguments[0], r, col_res, input_rows_count); + 
vector_const(arguments[0], r, col_res.get(), input_rows_count); } else { - vector_vector(arguments[0], arguments[1], col_res, input_rows_count); + vector_vector(arguments[0], arguments[1], col_res.get(), input_rows_count); } result_info.column = std::move(col_res); } @@ -156,11 +156,12 @@ void null_execute_impl(ColumnRawPtrs arguments, ColumnWithTypeAndName& result_in auto col_nulls = ColumnUInt8::create(input_rows_count); auto col_res = ColumnUInt8::create(input_rows_count); if (auto l = check_and_get_column(arguments[0])) { - vector_const_null(arguments[1], l, col_res, col_nulls, input_rows_count); + vector_const_null(arguments[1], l, col_res.get(), col_nulls.get(), input_rows_count); } else if (auto r = check_and_get_column(arguments[1])) { - vector_const_null(arguments[0], r, col_res, col_nulls, input_rows_count); + vector_const_null(arguments[0], r, col_res.get(), col_nulls.get(), input_rows_count); } else { - vector_vector_null(arguments[0], arguments[1], col_res, col_nulls, input_rows_count); + vector_vector_null(arguments[0], arguments[1], col_res.get(), col_nulls.get(), + input_rows_count); } result_info.column = ColumnNullable::create(std::move(col_res), std::move(col_nulls)); } diff --git a/be/src/vec/functions/in.h b/be/src/vec/functions/in.h index 5d590190182801..6f697ba7441df5 100644 --- a/be/src/vec/functions/in.h +++ b/be/src/vec/functions/in.h @@ -216,7 +216,7 @@ class FunctionIn : public IFunction { if (materialized_column->is_nullable()) { const auto* null_col_ptr = vectorized::check_and_get_column( - materialized_column); + materialized_column.get()); const auto& null_map = assert_cast( null_col_ptr->get_null_map_column()) .get_data(); diff --git a/be/src/vec/functions/least_greast.cpp b/be/src/vec/functions/least_greast.cpp index 7d1953f7041174..9ad53c4f531529 100644 --- a/be/src/vec/functions/least_greast.cpp +++ b/be/src/vec/functions/least_greast.cpp @@ -173,7 +173,7 @@ struct FunctionFieldImpl { size_t input_rows_count) { const auto& 
data_type = block.get_by_position(arguments[0]).type; auto result_column = ColumnInt32::create(input_rows_count, 0); - auto& res_data = static_cast(result_column)->get_data(); + auto& res_data = static_cast(result_column.get())->get_data(); const auto& column_size = arguments.size(); std::vector argument_columns(column_size); diff --git a/be/src/vec/functions/round.h b/be/src/vec/functions/round.h index 3f4f9c60fcbe3d..3b821f0aa528a4 100644 --- a/be/src/vec/functions/round.h +++ b/be/src/vec/functions/round.h @@ -731,6 +731,7 @@ class FunctionRounding : public IFunction { const auto* col_general = is_col_general_const ? assert_cast(*column_general.column) .get_data_column_ptr() + .get() : column_general.column.get(); ColumnPtr res; diff --git a/be/src/vec/sink/vtablet_block_convertor.cpp b/be/src/vec/sink/vtablet_block_convertor.cpp index 26de6ea6c7e3d1..466902a4f907ab 100644 --- a/be/src/vec/sink/vtablet_block_convertor.cpp +++ b/be/src/vec/sink/vtablet_block_convertor.cpp @@ -506,7 +506,8 @@ Status OlapTableBlockConvertor::_fill_auto_inc_cols(vectorized::Block* block, si vectorized::ColumnInt64::Container& dst_values = dst_column->get_data(); vectorized::ColumnPtr src_column_ptr = block->get_by_position(idx).column; - if (const auto* const_column = check_and_get_column(src_column_ptr)) { + if (const auto* const_column = + check_and_get_column(src_column_ptr.get())) { // for insert stmt like "insert into tbl1 select null,col1,col2,... from tbl2" or // "insert into tbl1 select 1,col1,col2,... 
from tbl2", the type of literal's column // will be `ColumnConst` @@ -530,7 +531,7 @@ Status OlapTableBlockConvertor::_fill_auto_inc_cols(vectorized::Block* block, si dst_values.resize_fill(rows, value); } } else if (const auto* src_nullable_column = - check_and_get_column(src_column_ptr)) { + check_and_get_column(src_column_ptr.get())) { auto src_nested_column_ptr = src_nullable_column->get_nested_column_ptr(); const auto& null_map_data = src_nullable_column->get_null_map_data(); dst_values.reserve(rows); diff --git a/be/src/vec/sink/writer/iceberg/partition_transformers.h b/be/src/vec/sink/writer/iceberg/partition_transformers.h index 79eb385b298a8f..0b18ce249522eb 100644 --- a/be/src/vec/sink/writer/iceberg/partition_transformers.h +++ b/be/src/vec/sink/writer/iceberg/partition_transformers.h @@ -153,8 +153,8 @@ class StringTruncatePartitionColumnTransform : public PartitionColumnTransform { ColumnPtr string_column_ptr; ColumnPtr null_map_column_ptr; bool is_nullable = false; - if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + if (const auto* nullable_column = + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = nullable_column->get_null_map_column_ptr(); string_column_ptr = nullable_column->get_nested_column_ptr(); is_nullable = true; @@ -211,7 +211,7 @@ class IntegerTruncatePartitionColumnTransform : public PartitionColumnTransform //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -270,7 +270,7 @@ class BigintTruncatePartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); 
ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -332,8 +332,8 @@ class DecimalTruncatePartitionColumnTransform : public PartitionColumnTransform ColumnPtr column_ptr; ColumnPtr null_map_column_ptr; bool is_nullable = false; - if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + if (const auto* nullable_column = + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = nullable_column->get_null_map_column_ptr(); column_ptr = nullable_column->get_nested_column_ptr(); is_nullable = true; @@ -342,7 +342,7 @@ class DecimalTruncatePartitionColumnTransform : public PartitionColumnTransform is_nullable = false; } - const auto* const decimal_col = check_and_get_column>(column_ptr); + const auto* const decimal_col = check_and_get_column>(column_ptr.get()); const auto& vec_src = decimal_col->get_data(); auto col_res = ColumnDecimal::create(vec_src.size(), decimal_col->get_scale()); @@ -391,7 +391,7 @@ class IntBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -454,7 +454,7 @@ class BigintBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -518,7 +518,7 @@ 
class DecimalBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -597,7 +597,7 @@ class DateBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -665,7 +665,7 @@ class TimestampBucketPartitionColumnTransform : public PartitionColumnTransform //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -746,7 +746,7 @@ class StringBucketPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -811,7 +811,7 @@ class DateYearPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = 
column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -883,7 +883,7 @@ class TimestampYearPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -955,7 +955,7 @@ class DateMonthPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1027,7 +1027,7 @@ class TimestampMonthPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1099,7 +1099,7 @@ class DateDayPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1177,7 +1177,7 @@ class TimestampDayPartitionColumnTransform : public 
PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1254,7 +1254,7 @@ class TimestampHourPartitionColumnTransform : public PartitionColumnTransform { //1) get the target column ptr const ColumnWithTypeAndName& column_with_type_and_name = block.get_by_position(column_pos); ColumnPtr column_ptr = column_with_type_and_name.column->convert_to_full_column_if_const(); - CHECK(column_ptr != nullptr); + CHECK(column_ptr); //2) get the input data from block ColumnPtr null_map_column_ptr; @@ -1328,7 +1328,7 @@ class VoidPartitionColumnTransform : public PartitionColumnTransform { ColumnPtr column_ptr; ColumnPtr null_map_column_ptr; if (auto* nullable_column = - check_and_get_column(column_with_type_and_name.column)) { + check_and_get_column(column_with_type_and_name.column.get())) { null_map_column_ptr = nullable_column->get_null_map_column_ptr(); column_ptr = nullable_column->get_nested_column_ptr(); } else { diff --git a/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp b/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp index 29c97b59ea4dba..608afced8d92db 100644 --- a/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp +++ b/be/src/vec/sink/writer/iceberg/viceberg_table_writer.cpp @@ -410,7 +410,7 @@ std::any VIcebergTableWriter::_get_iceberg_partition_value( int position) { //1) get the partition column ptr ColumnPtr col_ptr = partition_column.column->convert_to_full_column_if_const(); - CHECK(col_ptr != nullptr); + CHECK(col_ptr); if (col_ptr->is_nullable()) { const ColumnNullable* nullable_column = reinterpret_cast(col_ptr.get()); diff --git a/be/src/vec/utils/util.hpp b/be/src/vec/utils/util.hpp index 8d17b2787a53da..485d81311ba538 100644 
--- a/be/src/vec/utils/util.hpp +++ b/be/src/vec/utils/util.hpp @@ -197,7 +197,7 @@ inline void change_null_to_true(ColumnPtr column, ColumnPtr argument = nullptr) data[i] |= null_map[i]; } memset(null_map, 0, rows); - } else if (argument != nullptr && argument->has_null()) { + } else if (argument && argument->has_null()) { const auto* __restrict null_map = assert_cast(argument.get())->get_null_map_data().data(); auto* __restrict data = diff --git a/be/test/vec/columns/common_column_test.h b/be/test/vec/columns/common_column_test.h index 8e1b86c0168f99..b70ac660136216 100644 --- a/be/test/vec/columns/common_column_test.h +++ b/be/test/vec/columns/common_column_test.h @@ -989,7 +989,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), *cl); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after clone_resized with assert_res auto ser_col = ColumnString::create(); @@ -1042,7 +1042,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), insert_size); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after cut with assert_res auto ser_col = ColumnString::create(); ser_col->reserve(ptr->size()); @@ -1095,7 +1095,7 @@ class CommonColumnTest : public ::testing::Test { // check size EXPECT_EQ(ptr->size(), insert_size); // check ptr is not the same - EXPECT_NE(ptr.get(), source_column); + EXPECT_NE(ptr.get(), source_column.get()); // check after cut with assert_res auto ser_col = ColumnString::create(); ser_col->reserve(ptr->size()); diff --git a/be/test/vec/data_types/from_string_test.cpp b/be/test/vec/data_types/from_string_test.cpp index 01515b805d9be0..eb8b00ab16f69c 100644 --- a/be/test/vec/data_types/from_string_test.cpp +++ b/be/test/vec/data_types/from_string_test.cpp @@ -203,7 +203,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { string 
test_str = std::get<1>(type_pair)[i]; // data_type from_string ReadBuffer rb_test(test_str.data(), test_str.size()); - Status st = data_type_ptr->from_string(rb_test, col); + Status st = data_type_ptr->from_string(rb_test, col.get()); if (std::get<3>(type_pair)[i].empty()) { EXPECT_EQ(st.ok(), false); std::cout << "deserialize failed: " << st.to_json() << std::endl; @@ -256,11 +256,11 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { ReadBuffer rand_rb(rand_date.data(), rand_date.size()); auto col = data_type_ptr->create_column(); - Status st = data_type_ptr->from_string(min_rb, col); + Status st = data_type_ptr->from_string(min_rb, col.get()); EXPECT_EQ(st.ok(), true); - st = data_type_ptr->from_string(max_rb, col); + st = data_type_ptr->from_string(max_rb, col.get()); EXPECT_EQ(st.ok(), true); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), true); string min_s_d = data_type_ptr->to_string(*col, 0); @@ -319,7 +319,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { string rand_ip = rand_wf->to_string(); ReadBuffer rand_rb(rand_ip.data(), rand_ip.size()); auto col = data_type_ptr->create_column(); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), true); string rand_s_d = data_type_ptr->to_string(*col, 0); rtrim(rand_ip); @@ -336,7 +336,7 @@ TEST(FromStringTest, ScalaWrapperFieldVsDataType) { EXPECT_EQ(st.ok(), false); ReadBuffer rand_rb(pair.second.data(), pair.second.size()); auto col = data_type_ptr->create_column(); - st = data_type_ptr->from_string(rand_rb, col); + st = data_type_ptr->from_string(rand_rb, col.get()); EXPECT_EQ(st.ok(), false); } } diff --git a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp index 2affbc36c86ab3..b65b3fc6f63d2c 100644 --- a/be/test/vec/data_types/serde/data_type_serde_text_test.cpp +++ 
b/be/test/vec/data_types/serde/data_type_serde_text_test.cpp @@ -510,7 +510,7 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); EXPECT_EQ(status.ok(), true); auto ser_col = ColumnString::create(); ser_col->reserve(1); @@ -661,7 +661,7 @@ TEST(TextSerde, ComplexTypeSerdeTextTest) { { ReadBuffer rb(rand_str.data(), rand_str.size()); std::cout << "from string rb: " << rb.to_string() << std::endl; - Status stat = map_data_type_ptr->from_string(rb, col2); + Status stat = map_data_type_ptr->from_string(rb, col2.get()); std::cout << stat.to_json() << std::endl; auto ser_col = ColumnString::create(); ser_col->reserve(1); @@ -840,7 +840,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -995,7 +995,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -1213,7 +1213,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = map_data_type_ptr->create_column(); - Status status = map_data_type_ptr->from_string(rb, col2); + Status status = 
map_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; @@ -1354,7 +1354,7 @@ TEST(TextSerde, ComplexTypeWithNestedSerdeTextTest) { // from_string ReadBuffer rb(rand_str.data(), rand_str.size()); auto col2 = array_data_type_ptr->create_column(); - Status status = array_data_type_ptr->from_string(rb, col2); + Status status = array_data_type_ptr->from_string(rb, col2.get()); if (expect_from_string_str == "") { EXPECT_EQ(status.ok(), false); std::cout << "test from_string: " << status.to_json() << std::endl; diff --git a/be/test/vec/function/function_test_util.h b/be/test/vec/function/function_test_util.h index c33a1d64f83111..a3809bf8ec6a48 100644 --- a/be/test/vec/function/function_test_util.h +++ b/be/test/vec/function/function_test_util.h @@ -315,7 +315,7 @@ Status check_function(const std::string& func_name, const InputTypeSet& input_ty // 3. check the result of function ColumnPtr column = block.get_columns()[result]; - EXPECT_TRUE(column != nullptr); + EXPECT_TRUE(column); for (int i = 0; i < row_size; ++i) { // update current line diff --git a/be/test/vec/olap/char_type_padding_test.cpp b/be/test/vec/olap/char_type_padding_test.cpp index 0e4879e46a6990..dfdfea3026ecd0 100644 --- a/be/test/vec/olap/char_type_padding_test.cpp +++ b/be/test/vec/olap/char_type_padding_test.cpp @@ -40,10 +40,10 @@ TEST(CharTypePaddingTest, CharTypePaddingFullTest) { for (size_t i = 0; i < rows; i++) { input->insert_data(str.data(), str.length()); } - EXPECT_FALSE(ConvertorChar::should_padding(input, str.length())); + EXPECT_FALSE(ConvertorChar::should_padding(input.get(), str.length())); input->insert_data(str.data(), str.length() - 1); - EXPECT_TRUE(ConvertorChar::should_padding(input, str.length())); + EXPECT_TRUE(ConvertorChar::should_padding(input.get(), str.length())); } TEST(CharTypePaddingTest, CharTypePaddingDataTest) { @@ -56,7 +56,7 @@ 
TEST(CharTypePaddingTest, CharTypePaddingDataTest) { input->insert_data(str.data(), str.length() - i); } - auto output = ConvertorChar::clone_and_padding(input, str.length()); + auto output = ConvertorChar::clone_and_padding(input.get(), str.length()); for (int i = 0; i < rows; i++) { auto cell = output->get_data_at(i).to_string(); From 014f84accebfb20f0fcf4debdfd6c768b1cb8f69 Mon Sep 17 00:00:00 2001 From: zclllyybb Date: Mon, 23 Dec 2024 11:58:01 +0800 Subject: [PATCH 49/82] [Refactor](function) make all Datetime arithmetic operation overflow lead to exception in BE (#45265) ### What problem does this PR solve? Issue Number: close #xxx Related PR: #xxx Problem Summary: 1. totally refactored `FunctionDateOrDateTimeComputation`. removed some unnecessary function and template. simplified some template calculations. 2. All Datetime arithmetic operation overflow will lead to exception now. before for nullable input it will get `NULL` result see: ```sql mysql> select date_add('5000-10-10', interval 10000 year); +------------------------------------------------+ | years_add(cast('5000-10-10' as DATEV2), 10000) | +------------------------------------------------+ | NULL | +------------------------------------------------+ 1 row in set (0.10 sec) ``` now: ```sql ERROR 1105 (HY000): errCode = 2, detailMessage = (xxx)[E-218][E-218] Operation years_add of 5000-10-10, 10000 out of range ``` ### Release note All Datetime arithmetic operation overflow will lead to exception now. ### Check List (For Author) - Test - [ ] Regression test - [ ] Unit Test - [ ] Manual test (add detailed scripts or steps below) - [x] No need to test or manual test. Explain why: - [ ] This is a refactor/code format and no logic has been changed. - [x] Previous test can cover this change. - [ ] No code files have been changed. - [ ] Other reason - Behavior changed: - [ ] No. - [x] Yes. - Does this need documentation? - [x] No. - [ ] Yes. 
### Check List (For Reviewer who merge this PR) - [ ] Confirm the release note - [ ] Confirm test cases - [ ] Confirm document - [ ] Add branch pick label --- be/src/util/datetype_cast.hpp | 7 +- be/src/vec/common/typeid_cast.h | 3 - .../functions/array/function_array_range.cpp | 11 +- .../function_date_or_datetime_computation.cpp | 3 +- .../function_date_or_datetime_computation.h | 832 +++++++----------- ...nction_date_or_datetime_computation_v2.cpp | 4 +- be/src/vec/functions/function_helpers.h | 18 +- be/test/vec/function/function_test_util.h | 2 +- be/test/vec/function/function_time_test.cpp | 82 +- .../test_date_function_const.groovy | 2 +- ...te_or_datetime_computation_negative.groovy | 82 +- 11 files changed, 471 insertions(+), 575 deletions(-) diff --git a/be/src/util/datetype_cast.hpp b/be/src/util/datetype_cast.hpp index 495631ea7e376c..5c187ded7b729c 100644 --- a/be/src/util/datetype_cast.hpp +++ b/be/src/util/datetype_cast.hpp @@ -29,8 +29,10 @@ /* * We use these function family to clarify our types of datelike type. for example: * DataTypeDate -------------------> ColumnDate -----------------------> Int64 - * | TypeToColumn ValueTypeOfColumn - * | TypeToValueType + * | | TypeToColumn ValueTypeOfColumn | + * | ↘--------------------------------------------------------------↗ + * | ::FieldType + * ↓ TypeToValueType * VecDateTimeValue */ namespace doris::date_cast { @@ -102,6 +104,7 @@ constexpr bool IsV1() { std::is_same_v); } +// only for datelike types. template constexpr bool IsV2() { return !IsV1(); diff --git a/be/src/vec/common/typeid_cast.h b/be/src/vec/common/typeid_cast.h index e135ef3309d2ec..3f81586a707c33 100644 --- a/be/src/vec/common/typeid_cast.h +++ b/be/src/vec/common/typeid_cast.h @@ -20,14 +20,11 @@ #pragma once -#include #include -#include #include #include "common/exception.h" #include "common/status.h" -#include "vec/common/demangle.h" /** Checks type by comparing typeid. * The exact match of the type is checked. 
That is, cast to the ancestor will be unsuccessful. diff --git a/be/src/vec/functions/array/function_array_range.cpp b/be/src/vec/functions/array/function_array_range.cpp index 0980587660b20a..ffb5987c744d1f 100644 --- a/be/src/vec/functions/array/function_array_range.cpp +++ b/be/src/vec/functions/array/function_array_range.cpp @@ -16,10 +16,10 @@ // under the License. #include -#include #include #include +#include #include #include @@ -41,11 +41,11 @@ #include "vec/data_types/data_type_date_time.h" #include "vec/data_types/data_type_nullable.h" #include "vec/data_types/data_type_number.h" +#include "vec/data_types/data_type_time_v2.h" #include "vec/functions/function.h" #include "vec/functions/function_date_or_datetime_computation.h" #include "vec/functions/simple_function_factory.h" #include "vec/runtime/vdatetime_value.h" -#include "vec/utils/util.hpp" namespace doris { class FunctionContext; @@ -229,10 +229,9 @@ struct RangeImplUtil { dest_nested_null_map.push_back(0); offset++; move++; - idx = doris::vectorized::date_time_add< - UNIT::value, DateV2Value, - DateV2Value, DateTimeV2>(idx, step_row, - is_null); + idx = doris::vectorized::date_time_add(idx, step_row, + is_null); } dest_offsets.push_back(offset); } diff --git a/be/src/vec/functions/function_date_or_datetime_computation.cpp b/be/src/vec/functions/function_date_or_datetime_computation.cpp index f6bf806ad46c1d..ece897d6dcbf7c 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation.cpp +++ b/be/src/vec/functions/function_date_or_datetime_computation.cpp @@ -55,7 +55,7 @@ using FunctionWeeksDiff = using FunctionHoursDiff = FunctionDateOrDateTimeComputation>; using FunctionMinutesDiff = - FunctionDateOrDateTimeComputation>; + FunctionDateOrDateTimeComputation>; using FunctionSecondsDiff = FunctionDateOrDateTimeComputation>; @@ -68,6 +68,7 @@ struct NowFunctionName { static constexpr auto name = "now"; }; +//TODO: remove the inter-layer CurrentDateTimeImpl using FunctionNow = 
FunctionCurrentDateOrDateTime>; using FunctionNowWithPrecision = diff --git a/be/src/vec/functions/function_date_or_datetime_computation.h b/be/src/vec/functions/function_date_or_datetime_computation.h index 224bf49179177c..8165f57881b839 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation.h +++ b/be/src/vec/functions/function_date_or_datetime_computation.h @@ -17,13 +17,12 @@ #pragma once -#include -#include - #include #include +#include #include #include +#include #include #include @@ -32,7 +31,6 @@ #include "common/exception.h" #include "common/logging.h" #include "common/status.h" -#include "fmt/format.h" #include "runtime/runtime_state.h" #include "udf/udf.h" #include "util/binary_cast.hpp" @@ -45,12 +43,10 @@ #include "vec/columns/columns_number.h" #include "vec/common/assert_cast.h" #include "vec/common/pod_array_fwd.h" -#include "vec/common/typeid_cast.h" #include "vec/core/block.h" #include "vec/core/column_numbers.h" #include "vec/core/column_with_type_and_name.h" #include "vec/core/columns_with_type_and_name.h" -#include "vec/core/field.h" #include "vec/core/types.h" #include "vec/data_types/data_type.h" #include "vec/data_types/data_type_date.h" @@ -67,73 +63,57 @@ namespace doris::vectorized { -template -extern ResultType date_time_add(const Arg& t, Int64 delta, bool& is_null) { - auto ts_value = binary_cast(t); +/// because all these functions(xxx_add/xxx_sub) defined in FE use Integer as the second value +/// so Int32 as delta is enough. For upstream(FunctionDateOrDateTimeComputation) we also could use Int32. 
+ +template +ReturnNativeType date_time_add(const InputNativeType& t, Int32 delta, bool& is_null) { + using DateValueType = date_cast::TypeToValueTypeV; + using ResultDateValueType = date_cast::TypeToValueTypeV; + // e.g.: for DatatypeDatetimeV2, cast from u64 to DateV2Value + auto ts_value = binary_cast(t); TimeInterval interval(unit, delta, false); - if constexpr (std::is_same_v || - std::is_same_v) { + if constexpr (std::is_same_v) { is_null = !(ts_value.template date_add_interval(interval)); - - return binary_cast(ts_value); + // here DateValueType = ResultDateValueType + return binary_cast(ts_value); } else { + // this is for HOUR/MINUTE/SECOND/MS_ADD for datev2. got datetimev2 but not datev2. so need this two-arg reload to assign. ResultDateValueType res; is_null = !(ts_value.template date_add_interval(interval, res)); - return binary_cast(res); + return binary_cast(res); } } -#define ADD_TIME_FUNCTION_IMPL(CLASS, NAME, UNIT) \ - template \ - struct CLASS { \ - using ReturnType = std::conditional_t< \ - date_cast::IsV1(), DataTypeDateTime, \ - std::conditional_t< \ - std::is_same_v, \ - std::conditional_t, \ - DataTypeDateTimeV2>>; \ - using ReturnNativeType = \ - date_cast::ValueTypeOfColumnV>; \ - using InputNativeType = date_cast::ValueTypeOfColumnV>; \ - static constexpr auto name = #NAME; \ - static constexpr auto is_nullable = true; \ - static inline ReturnNativeType execute(const InputNativeType& t, Int64 delta, \ - bool& is_null) { \ - if constexpr (std::is_same_v || \ - std::is_same_v) { \ - return date_time_add(t, delta, \ - is_null); \ - } else if constexpr (std::is_same_v) { \ - if constexpr (TimeUnit::UNIT == TimeUnit::HOUR || \ - TimeUnit::UNIT == TimeUnit::MINUTE || \ - TimeUnit::UNIT == TimeUnit::SECOND || \ - TimeUnit::UNIT == TimeUnit::SECOND_MICROSECOND) { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>( \ - t, delta, is_null); \ - } else { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>(t, delta, \ - is_null); \ - 
} \ - \ - } else { \ - return date_time_add, \ - DateV2Value, ReturnNativeType>(t, delta, \ - is_null); \ - } \ - } \ - \ - static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define ADD_TIME_FUNCTION_IMPL(CLASS, NAME, UNIT) \ + template \ + struct CLASS { \ + /* for V1 type all return Datetime. for V2 type, if unit <= hour, increase to DatetimeV2 */ \ + using ReturnType = std::conditional_t< \ + date_cast::IsV1(), DataTypeDateTime, \ + std::conditional_t< \ + std::is_same_v, \ + std::conditional_t, \ + DataTypeDateTimeV2>>; \ + using ReturnNativeType = ReturnType::FieldType; \ + using InputNativeType = ArgType::FieldType; \ + static constexpr auto name = #NAME; \ + static constexpr auto is_nullable = true; \ + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, \ + bool& is_null) { \ + return date_time_add(t, delta, is_null); \ + } \ + \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ } ADD_TIME_FUNCTION_IMPL(AddMicrosecondsImpl, microseconds_add, MICROSECOND); @@ -146,46 +126,32 @@ ADD_TIME_FUNCTION_IMPL(AddWeeksImpl, weeks_add, WEEK); ADD_TIME_FUNCTION_IMPL(AddMonthsImpl, months_add, MONTH); ADD_TIME_FUNCTION_IMPL(AddYearsImpl, years_add, YEAR); -template +template struct AddQuartersImpl { using ReturnType = - std::conditional_t || - std::is_same_v, + std::conditional_t || + std::is_same_v, DataTypeDateTime, - std::conditional_t, + std::conditional_t, DataTypeDateV2, DataTypeDateTimeV2>>; - using InputNativeType = std::conditional_t< - std::is_same_v || std::is_same_v, - Int64, std::conditional_t, UInt32, UInt64>>; - using ReturnNativeType = std::conditional_t< - std::is_same_v || std::is_same_v, - Int64, std::conditional_t, UInt32, UInt64>>; + using InputNativeType = ArgType::FieldType; + using ReturnNativeType = ReturnType::FieldType; static constexpr auto name = "quarters_add"; static constexpr auto is_nullable 
= true; - static inline ReturnNativeType execute(const InputNativeType& t, Int64 delta, bool& is_null) { - if constexpr (std::is_same_v || - std::is_same_v) { - return date_time_add(t, delta, is_null); - } else if constexpr (std::is_same_v) { - return date_time_add, - DateV2Value, ReturnNativeType>(t, delta, is_null); - } else { - return date_time_add, - DateV2Value, ReturnNativeType>(t, delta, - is_null); - } + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, bool& is_null) { + return date_time_add(t, 3 * delta, is_null); } - static DataTypes get_variadic_argument_types() { return {std::make_shared()}; } + static DataTypes get_variadic_argument_types() { return {std::make_shared()}; } }; template struct SubtractIntervalImpl { using ReturnType = typename Transform::ReturnType; using InputNativeType = typename Transform::InputNativeType; + using ReturnNativeType = typename Transform::ReturnNativeType; static constexpr auto is_nullable = true; - static inline Int64 execute(const InputNativeType& t, Int64 delta, bool& is_null) { + static inline ReturnNativeType execute(const InputNativeType& t, Int32 delta, bool& is_null) { return Transform::execute(t, -delta, is_null); } @@ -244,57 +210,49 @@ struct SubtractYearsImpl : SubtractIntervalImpl, DateType static constexpr auto name = "years_sub"; }; -#define DECLARE_DATE_FUNCTIONS(NAME, FN_NAME, RETURN_TYPE, STMT) \ - template \ - struct NAME { \ - using ArgType1 = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using ArgType2 = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using DateValueType1 = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using DateValueType2 = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using ReturnType = RETURN_TYPE; \ - static constexpr 
auto name = #FN_NAME; \ - static constexpr auto is_nullable = false; \ - static inline ReturnType::FieldType execute(const ArgType1& t0, const ArgType2& t1, \ - bool& is_null) { \ - const auto& ts0 = reinterpret_cast(t0); \ - const auto& ts1 = reinterpret_cast(t1); \ - is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); \ - return STMT; \ - } \ - static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define DECLARE_DATE_FUNCTIONS(NAME, FN_NAME, RETURN_TYPE, STMT) \ + template \ + struct NAME { \ + using NativeType1 = DateType1::FieldType; \ + using NativeType2 = DateType2::FieldType; \ + using DateValueType1 = date_cast::TypeToValueTypeV; \ + using DateValueType2 = date_cast::TypeToValueTypeV; \ + using ReturnType = RETURN_TYPE; \ + \ + static constexpr auto name = #FN_NAME; \ + static constexpr auto is_nullable = false; \ + static inline ReturnType::FieldType execute(const NativeType1& t0, const NativeType2& t1, \ + bool& is_null) { \ + const auto& ts0 = reinterpret_cast(t0); \ + const auto& ts1 = reinterpret_cast(t1); \ + is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); \ + return (STMT); \ + } \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ }; + DECLARE_DATE_FUNCTIONS(DateDiffImpl, datediff, DataTypeInt32, (ts0.daynr() - ts1.daynr())); // DECLARE_DATE_FUNCTIONS(TimeDiffImpl, timediff, DataTypeTime, ts0.second_diff(ts1)); -// Expands to +// Expands to below here because it use Time type which need some special deal. 
template struct TimeDiffImpl { - using DateValueType1 = date_cast::TypeToValueTypeV; - using DateValueType2 = date_cast::TypeToValueTypeV; - using ArgType1 = date_cast::ValueTypeOfColumnV>; - using ArgType2 = date_cast::ValueTypeOfColumnV>; + using NativeType1 = date_cast::TypeToValueTypeV; + using NativeType2 = date_cast::TypeToValueTypeV; + using ArgType1 = DateType1::FieldType; + using ArgType2 = DateType2::FieldType; static constexpr bool UsingTimev2 = date_cast::IsV2() || date_cast::IsV2(); - using ReturnType = DataTypeTimeV2; + using ReturnType = DataTypeTimeV2; // TimeV1Type also use double as native type. same as v2. static constexpr auto name = "timediff"; static constexpr int64_t limit_value = 3020399000000; // 838:59:59 convert to microsecond static inline ReturnType::FieldType execute(const ArgType1& t0, const ArgType2& t1, bool& is_null) { - const auto& ts0 = reinterpret_cast(t0); - const auto& ts1 = reinterpret_cast(t1); + const auto& ts0 = reinterpret_cast(t0); + const auto& ts1 = reinterpret_cast(t1); is_null = !ts0.is_valid_date() || !ts1.is_valid_date(); if constexpr (UsingTimev2) { // refer to https://dev.mysql.com/doc/refman/5.7/en/time.html @@ -318,381 +276,138 @@ struct TimeDiffImpl { #define TIME_DIFF_FUNCTION_IMPL(CLASS, NAME, UNIT) \ DECLARE_DATE_FUNCTIONS(CLASS, NAME, DataTypeInt64, datetime_diff(ts1, ts0)) +// all these functions implemented by datediff TIME_DIFF_FUNCTION_IMPL(YearsDiffImpl, years_diff, YEAR); TIME_DIFF_FUNCTION_IMPL(MonthsDiffImpl, months_diff, MONTH); TIME_DIFF_FUNCTION_IMPL(WeeksDiffImpl, weeks_diff, WEEK); TIME_DIFF_FUNCTION_IMPL(DaysDiffImpl, days_diff, DAY); TIME_DIFF_FUNCTION_IMPL(HoursDiffImpl, hours_diff, HOUR); -TIME_DIFF_FUNCTION_IMPL(MintueSDiffImpl, minutes_diff, MINUTE); +TIME_DIFF_FUNCTION_IMPL(MintuesDiffImpl, minutes_diff, MINUTE); TIME_DIFF_FUNCTION_IMPL(SecondsDiffImpl, seconds_diff, SECOND); TIME_DIFF_FUNCTION_IMPL(MilliSecondsDiffImpl, milliseconds_diff, MILLISECOND); 
TIME_DIFF_FUNCTION_IMPL(MicroSecondsDiffImpl, microseconds_diff, MICROSECOND); -#define TIME_FUNCTION_TWO_ARGS_IMPL(CLASS, NAME, FUNCTION, RETURN_TYPE) \ - template \ - struct CLASS { \ - using ArgType = std::conditional_t< \ - std::is_same_v, UInt32, \ - std::conditional_t, UInt64, Int64>>; \ - using DateValueType = std::conditional_t< \ - std::is_same_v, DateV2Value, \ - std::conditional_t, \ - DateV2Value, VecDateTimeValue>>; \ - using ReturnType = RETURN_TYPE; \ - static constexpr auto name = #NAME; \ - static constexpr auto is_nullable = false; \ - static inline ReturnType::FieldType execute(const ArgType& t0, const Int32 mode, \ - bool& is_null) { \ - const auto& ts0 = reinterpret_cast(t0); \ - is_null = !ts0.is_valid_date(); \ - return ts0.FUNCTION; \ - } \ - static DataTypes get_variadic_argument_types() { \ - return {std::make_shared(), std::make_shared()}; \ - } \ +#define TIME_FUNCTION_TWO_ARGS_IMPL(CLASS, NAME, FUNCTION, RETURN_TYPE) \ + template \ + struct CLASS { \ + using ArgType = DateType::FieldType; \ + using DateValueType = date_cast::TypeToValueTypeV; \ + using ReturnType = RETURN_TYPE; \ + \ + static constexpr auto name = #NAME; \ + static constexpr auto is_nullable = false; \ + static inline ReturnType::FieldType execute(const ArgType& t0, const Int32 mode, \ + bool& is_null) { \ + const auto& ts0 = reinterpret_cast(t0); \ + is_null = !ts0.is_valid_date(); \ + return ts0.FUNCTION; \ + } \ + static DataTypes get_variadic_argument_types() { \ + return {std::make_shared(), std::make_shared()}; \ + } \ } TIME_FUNCTION_TWO_ARGS_IMPL(ToYearWeekTwoArgsImpl, yearweek, year_week(mysql_week_mode(mode)), DataTypeInt32); TIME_FUNCTION_TWO_ARGS_IMPL(ToWeekTwoArgsImpl, week, week(mysql_week_mode(mode)), DataTypeInt8); -template +// only use for FunctionDateOrDateTimeComputation. FromTypes are NativeTypes. 
+template struct DateTimeOp { - // use for (DateTime, DateTime) -> other_type - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to, NullMap& null_map) { - size_t size = vec_from0.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - // here reinterpret_cast is used to convert uint8& to bool&, - // otherwise it will be implicitly converted to bool, causing the rvalue to fail to match the lvalue. - // the same goes for the following. - vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], - reinterpret_cast(null_map[i])); - } - } - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to) { - size_t size = vec_from0.size(); - vec_to.resize(size); - - bool invalid = true; - for (size_t i = 0; i < size; ++i) { - // here reinterpret_cast is used to convert uint8& to bool&, - // otherwise it will be implicitly converted to bool, causing the rvalue to fail to match the lvalue. - // the same goes for the following. - vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from0[i], vec_from1[i]); - } + using NativeType0 = DataType0::FieldType; + using NativeType1 = DataType1::FieldType; + using ValueType0 = date_cast::TypeToValueTypeV; + // arg1 maybe just delta value(e.g. 
DataTypeInt32, not datelike type) + constexpr static bool CastType1 = std::is_same_v || + std::is_same_v || + std::is_same_v || + std::is_same_v; + + static void throw_out_of_bound(NativeType0 arg0, NativeType1 arg1) { + auto value0 = binary_cast(arg0); + char buf0[40]; + char* end0 = value0.to_string(buf0); + if constexpr (CastType1) { + auto value1 = binary_cast>(arg1); + char buf1[40]; + char* end1 = value1.to_string(buf1); + throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} of {}, {} out of range", + Transform::name, std::string_view {buf0, end0 - 1}, + std::string_view {buf1, end1 - 1}); // minus 1 to skip /0 + } else { + throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} of {}, {} out of range", + Transform::name, std::string_view {buf0, end0 - 1}, arg1); } } - // use for (DateTime, int32) -> other_type - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to, NullMap& null_map) { - size_t size = vec_from0.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) - vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], - reinterpret_cast(null_map[i])); - } - static void vector_vector(const PaddedPODArray& vec_from0, - const PaddedPODArray& vec_from1, - PaddedPODArray& vec_to) { + // execute on the null value's nested value may cause false positive exception, so use nullmaps to skip them. 
+ static void vector_vector(const PaddedPODArray& vec_from0, + const PaddedPODArray& vec_from1, + PaddedPODArray& vec_to, const NullMap* nullmap0, + const NullMap* nullmap1) { size_t size = vec_from0.size(); vec_to.resize(size); + bool invalid = false; - bool invalid = true; for (size_t i = 0; i < size; ++i) { + if ((nullmap0 && (*nullmap0)[i]) || (nullmap1 && (*nullmap1)[i])) [[unlikely]] { + continue; + } vec_to[i] = Transform::execute(vec_from0[i], vec_from1[i], invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from0[i], vec_from1[i]); + throw_out_of_bound(vec_from0[i], vec_from1[i]); } } } - // use for (DateTime, const DateTime) -> other_type - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, NullMap& null_map, Int128& delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = - Transform::execute(vec_from[i], delta, reinterpret_cast(null_map[i])); + static void vector_constant(const PaddedPODArray& vec_from, + PaddedPODArray& vec_to, const NativeType1& delta, + const NullMap* nullmap0, const NullMap* nullmap1) { + if (nullmap1 && (*nullmap1)[0]) [[unlikely]] { + return; } - } - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, Int128& delta) { size_t size = vec_from.size(); vec_to.resize(size); + bool invalid = false; - bool invalid = true; for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(vec_from[i], delta, invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from[i], delta); + if (nullmap0 && (*nullmap0)[i]) [[unlikely]] { + continue; } - } - } - - // use for (DateTime, const ColumnNumber) -> other_type - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, NullMap& 
null_map, Int64 delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = - Transform::execute(vec_from[i], delta, reinterpret_cast(null_map[i])); - } - } - static void vector_constant(const PaddedPODArray& vec_from, - PaddedPODArray& vec_to, Int64 delta) { - size_t size = vec_from.size(); - vec_to.resize(size); - bool invalid = true; - - for (size_t i = 0; i < size; ++i) { vec_to[i] = Transform::execute(vec_from[i], delta, invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, vec_from[i], delta); + throw_out_of_bound(vec_from[i], delta); } } } - // use for (const DateTime, ColumnNumber) -> other_type - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - NullMap& null_map, const IColumn& delta) { - size_t size = delta.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(from, delta.get_int(i), - reinterpret_cast(null_map[i])); + static void constant_vector(const NativeType0& from, PaddedPODArray& vec_to, + const PaddedPODArray& delta, const NullMap* nullmap0, + const NullMap* nullmap1) { + if (nullmap0 && (*nullmap0)[0]) [[unlikely]] { + return; } - } - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - const IColumn& delta) { size_t size = delta.size(); vec_to.resize(size); - bool invalid = true; + bool invalid = false; for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(from, delta.get_int(i), invalid); - - if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, from, delta.get_int(i)); + if (nullmap1 && (*nullmap1)[i]) [[unlikely]] { + continue; } - } - } - - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - NullMap& null_map, const 
PaddedPODArray& delta) { - size_t size = delta.size(); - vec_to.resize(size); - null_map.resize_fill(size, false); - - for (size_t i = 0; i < size; ++i) { - vec_to[i] = Transform::execute(from, delta[i], reinterpret_cast(null_map[i])); - } - } - - static void constant_vector(const FromType1& from, PaddedPODArray& vec_to, - const PaddedPODArray& delta) { - size_t size = delta.size(); - vec_to.resize(size); - bool invalid = true; - - for (size_t i = 0; i < size; ++i) { vec_to[i] = Transform::execute(from, delta[i], invalid); if (UNLIKELY(invalid)) { - throw Exception(ErrorCode::OUT_OF_BOUND, "Operation {} {} {} out of range", - Transform::name, from, delta[i]); - } - } - } -}; - -template -struct DateTimeAddIntervalImpl { - static Status execute(Block& block, const ColumnNumbers& arguments, uint32_t result, - size_t input_rows_count) { - using ToType = typename Transform::ReturnType::FieldType; - using Op = DateTimeOp; - - const ColumnPtr source_col = remove_nullable(block.get_by_position(arguments[0]).column); - const auto is_nullable = block.get_by_position(result).type->is_nullable(); - if (const auto* sources = check_and_get_column>(source_col.get())) { - auto col_to = ColumnVector::create(); - auto delta_column_ptr = remove_nullable(block.get_by_position(arguments[1]).column); - const IColumn& delta_column = *delta_column_ptr; - - if (is_nullable) { - auto null_map = ColumnUInt8::create(input_rows_count, 0); - if (const auto* delta_const_column = - typeid_cast(&delta_column)) { - if (delta_const_column->get_field().get_type() == Field::Types::Int128) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::Int64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == 
Field::Types::UInt64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } else { - Op::vector_constant(sources->get_data(), col_to->get_data(), - null_map->get_data(), - delta_const_column->get_field().get()); - } - } else { - if (const auto* delta_vec_column0 = - check_and_get_column>(delta_column)) { - Op::vector_vector(sources->get_data(), delta_vec_column0->get_data(), - col_to->get_data(), null_map->get_data()); - } else { - const auto* delta_vec_column1 = - check_and_get_column>(delta_column); - DCHECK(delta_vec_column1 != nullptr); - Op::vector_vector(sources->get_data(), delta_vec_column1->get_data(), - col_to->get_data(), null_map->get_data()); - } - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[0]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[1]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - block.get_by_position(result).column = - ColumnNullable::create(std::move(col_to), std::move(null_map)); - } else { - if (const auto* delta_const_column = - typeid_cast(&delta_column)) { - if (delta_const_column->get_field().get_type() == Field::Types::Int128) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } else if (delta_const_column->get_field().get_type() == Field::Types::Int64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); 
- } else if (delta_const_column->get_field().get_type() == Field::Types::UInt64) { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } else { - Op::vector_constant(sources->get_data(), col_to->get_data(), - delta_const_column->get_field().get()); - } - } else { - if (const auto* delta_vec_column0 = - check_and_get_column>(delta_column)) { - Op::vector_vector(sources->get_data(), delta_vec_column0->get_data(), - col_to->get_data()); - } else { - const auto* delta_vec_column1 = - check_and_get_column>(delta_column); - DCHECK(delta_vec_column1 != nullptr); - Op::vector_vector(sources->get_data(), delta_vec_column1->get_data(), - col_to->get_data()); - } - } - block.replace_by_position(result, std::move(col_to)); + throw_out_of_bound(from, delta[i]); } - } else if (const auto* sources_const = - check_and_get_column_const>(source_col.get())) { - auto col_to = ColumnVector::create(); - if (is_nullable) { - auto null_map = ColumnUInt8::create(input_rows_count, 0); - auto not_nullable_column_ptr_arg1 = - remove_nullable(block.get_by_position(arguments[1]).column); - if (const auto* delta_vec_column = check_and_get_column>( - *not_nullable_column_ptr_arg1)) { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), null_map->get_data(), - delta_vec_column->get_data()); - } else { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), null_map->get_data(), - *not_nullable_column_ptr_arg1); - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[0]).column.get())) { - NullMap& result_null_map = assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - if (const auto* nullable_col = check_and_get_column( - block.get_by_position(arguments[1]).column.get())) { - NullMap& result_null_map = 
assert_cast(*null_map).get_data(); - const NullMap& src_null_map = - assert_cast(nullable_col->get_null_map_column()) - .get_data(); - - VectorizedUtils::update_null_map(result_null_map, src_null_map); - } - block.get_by_position(result).column = - ColumnNullable::create(std::move(col_to), std::move(null_map)); - } else { - if (const auto* delta_vec_column = check_and_get_column>( - *block.get_by_position(arguments[1]).column)) { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), delta_vec_column->get_data()); - } else { - Op::constant_vector(sources_const->template get_value(), - col_to->get_data(), - *block.get_by_position(arguments[1]).column); - } - block.replace_by_position(result, std::move(col_to)); - } - } else { - return Status::RuntimeError( - "Illegal column {} of first argument and type {} of function {}", - block.get_by_position(arguments[0]).column->get_name(), - block.get_by_position(arguments[0]).type->get_name(), Transform::name); } - return Status::OK(); } }; +// Used for date(time) add/sub date(time)/integer. the input types are variadic and dispatch in execute. 
the return type is +// decided by Transform template class FunctionDateOrDateTimeComputation : public IFunction { public: @@ -708,41 +423,14 @@ class FunctionDateOrDateTimeComputation : public IFunction { size_t get_number_of_arguments() const override { return 0; } DataTypes get_variadic_argument_types_impl() const override { - if constexpr (has_variadic_argument) return Transform::get_variadic_argument_types(); + if constexpr (has_variadic_argument) { + return Transform::get_variadic_argument_types(); + } return {}; } bool use_default_implementation_for_nulls() const override { return false; } DataTypePtr get_return_type_impl(const ColumnsWithTypeAndName& arguments) const override { - if (arguments.size() != 2 && arguments.size() != 3) { - throw doris::Exception(ErrorCode::INVALID_ARGUMENT, - "Number of arguments for function {} doesn't match: passed {} , " - "should be 2 or 3", - get_name(), arguments.size()); - } - - if (arguments.size() == 2) { - if (!is_date_or_datetime(remove_nullable(arguments[0].type)) && - !is_date_v2_or_datetime_v2(remove_nullable(arguments[0].type))) { - throw doris::Exception( - ErrorCode::INVALID_ARGUMENT, - "Illegal type {} of argument of function {}. Should be a date or a date " - "with time", - arguments[0].type->get_name(), get_name()); - } - } else { - if (!WhichDataType(remove_nullable(arguments[0].type)).is_date_time() || - !WhichDataType(remove_nullable(arguments[0].type)).is_date_time_v2() || - !WhichDataType(remove_nullable(arguments[2].type)).is_string()) { - throw doris::Exception( - ErrorCode::INVALID_ARGUMENT, - "Function {} supports 2 or 3 arguments. The 1st argument must be of type " - "Date or DateTime. The 2nd argument must be number. The 3rd argument " - "(optional) must be a constant string with timezone name. 
The timezone " - "argument is allowed only when the 1st argument has the type DateTime", - get_name()); - } - } RETURN_REAL_TYPE_FOR_DATEV2_FUNCTION(typename Transform::ReturnType); } @@ -753,48 +441,164 @@ class FunctionDateOrDateTimeComputation : public IFunction { WhichDataType which1(remove_nullable(first_arg_type)); WhichDataType which2(remove_nullable(second_arg_type)); + /// now dispatch with the two arguments' type. no need to consider return type because the same arguments decide a + /// unique return type which could be extracted from Transform. + + // for all `xxx_add/sub`, the second arg is int32. + // for `week/yearweek`, if it has the second arg, it's int32. + // in these situations, the first would be any datelike type. + if (which2.is_int32()) { + switch (which1.idx) { + case TypeIndex::Date: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateTime: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateV2: + return execute_inner(block, arguments, result, + input_rows_count); + break; + case TypeIndex::DateTimeV2: + return execute_inner(block, arguments, result, + input_rows_count); + break; + default: + return Status::InternalError("Illegal argument {} and {} of function {}", + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).type->get_name(), + get_name()); + } + } + // then consider datelike - datelike. everything is possible here as well. + // for `xxx_diff`, every combination of V2 is possible. 
but for V1 we only support Datetime - Datetime if (which1.is_date_v2() && which2.is_date_v2()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_time_v2() && which2.is_date_time_v2()) { - return DateTimeAddIntervalImpl< - DataTypeDateTimeV2::FieldType, Transform, - DataTypeDateTimeV2::FieldType>::execute(block, arguments, result, - input_rows_count); - } else if (which1.is_date_time() && which2.is_date_time()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_v2() && which2.is_date_time_v2()) { - return DateTimeAddIntervalImpl< - DataTypeDateV2::FieldType, Transform, - DataTypeDateTimeV2::FieldType>::execute(block, arguments, result, - input_rows_count); + return execute_inner(block, arguments, result, + input_rows_count); } else if (which1.is_date_time_v2() && which2.is_date_v2()) { - return DateTimeAddIntervalImpl::execute(block, arguments, - result, - input_rows_count); - } else if (which1.is_date()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_time()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_v2()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else if (which1.is_date_time_v2()) { - return DateTimeAddIntervalImpl::execute( - block, arguments, result, input_rows_count); - } else { - return Status::RuntimeError("Illegal type {} of argument of function {}", - block.get_by_position(arguments[0]).type->get_name(), - get_name()); + return execute_inner(block, arguments, result, + input_rows_count); + } else if (which1.is_date_time() && which2.is_date_time()) { + return execute_inner(block, 
arguments, result, + input_rows_count); } + return Status::InternalError("Illegal argument {} and {} of function {}", + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).type->get_name(), + get_name()); + } + + template + static Status execute_inner(Block& block, const ColumnNumbers& arguments, uint32_t result, + size_t input_rows_count) { + using NativeType0 = DataType0::FieldType; + using NativeType1 = DataType1::FieldType; + using ResFieldType = typename Transform::ReturnType::FieldType; + using Op = DateTimeOp; + + auto get_null_map = [](const ColumnPtr& col) -> const NullMap* { + if (col->is_nullable()) { + return &static_cast(*col).get_null_map_data(); + } + // Const(Nullable) + if (const auto* const_col = check_and_get_column(col.get()); + const_col != nullptr && const_col->is_concrete_nullable()) { + return &static_cast(const_col->get_data_column()) + .get_null_map_data(); + } + return nullptr; + }; + + //ATTN: those null maps may be nullmap of ColumnConst(only 1 row) + // src column is always datelike type. 
+ ColumnPtr& col0 = block.get_by_position(arguments[0]).column; + const NullMap* nullmap0 = get_null_map(col0); + // the second column may be delta column(xx_add/sub) or datelike column(xxx_diff) + ColumnPtr& col1 = block.get_by_position(arguments[1]).column; + const NullMap* nullmap1 = get_null_map(col1); + + // if null wrapped, extract nested column as src_nested_col + const ColumnPtr src_nested_col = remove_nullable(col0); + const auto result_nullable = block.get_by_position(result).type->is_nullable(); + auto res_col = ColumnVector::create(); + + // vector-const or vector-vector + if (const auto* sources = + check_and_get_column>(src_nested_col.get())) { + const ColumnPtr nest_col1 = remove_nullable(col1); + bool rconst = false; + // vector-const + if (const auto* nest_col1_const = check_and_get_column(*nest_col1)) { + rconst = true; + const auto col1_inside_const = assert_cast&>( + nest_col1_const->get_data_column()); + Op::vector_constant(sources->get_data(), res_col->get_data(), + col1_inside_const.get_data()[0], nullmap0, nullmap1); + } else { // vector-vector + const auto concrete_col1 = + assert_cast&>(*nest_col1); + Op::vector_vector(sources->get_data(), concrete_col1.get_data(), + res_col->get_data(), nullmap0, nullmap1); + } + + // update result nullmap with inputs + if (result_nullable) { + auto null_map = ColumnBool::create(input_rows_count, 0); + NullMap& result_null_map = assert_cast(*null_map).get_data(); + if (nullmap0) { + VectorizedUtils::update_null_map(result_null_map, *nullmap0); + } + if (nullmap1) { + VectorizedUtils::update_null_map(result_null_map, *nullmap1, rconst); + } + block.get_by_position(result).column = + ColumnNullable::create(std::move(res_col), std::move(null_map)); + } else { + block.replace_by_position(result, std::move(res_col)); + } + } else if (const auto* sources_const = + check_and_get_column_const>( + src_nested_col.get())) { + // const-vector + const auto col0_inside_const = + 
assert_cast&>(sources_const->get_data_column()); + const ColumnPtr nested_col1 = remove_nullable(col1); + const auto concrete_col1 = assert_cast&>(*nested_col1); + Op::constant_vector(col0_inside_const.get_data()[0], res_col->get_data(), + concrete_col1.get_data(), nullmap0, nullmap1); + + // update result nullmap with inputs + if (result_nullable) { + auto null_map = ColumnBool::create(input_rows_count, 0); + NullMap& result_null_map = assert_cast(*null_map).get_data(); + if (nullmap0) { + VectorizedUtils::update_null_map(result_null_map, *nullmap0, true); + } + if (nullmap1) { // no const-const here. default impl deal it. + VectorizedUtils::update_null_map(result_null_map, *nullmap1); + } + block.get_by_position(result).column = + ColumnNullable::create(std::move(res_col), std::move(null_map)); + } else { + block.replace_by_position(result, std::move(res_col)); + } + } else { // no const-const here. default impl deal it. + return Status::InternalError( + "Illegel columns for function {}:\n1: {} with type {}\n2: {} with type {}", + Transform::name, block.get_by_position(arguments[0]).name, + block.get_by_position(arguments[0]).type->get_name(), + block.get_by_position(arguments[1]).name, + block.get_by_position(arguments[1]).type->get_name()); + } + return Status::OK(); } }; @@ -1170,7 +974,9 @@ class CurrentDateFunctionBuilder : public FunctionBuilderImpl { FunctionBasePtr build_impl(const ColumnsWithTypeAndName& arguments, const DataTypePtr& return_type) const override { DataTypes data_types(arguments.size()); - for (size_t i = 0; i < arguments.size(); ++i) data_types[i] = arguments[i].type; + for (size_t i = 0; i < arguments.size(); ++i) { + data_types[i] = arguments[i].type; + } if (is_date_v2(return_type)) { auto function = FunctionCurrentDateOrDateTime< CurrentDateImpl>::create(); diff --git a/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp b/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp index 
ec9560456c131a..db43bf1818d38f 100644 --- a/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp +++ b/be/src/vec/functions/function_date_or_datetime_computation_v2.cpp @@ -95,14 +95,14 @@ using FunctionDatetimeV2SubYears = FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateTimeV2, DataTypeDateV2) \ FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateV2, DataTypeDateTimeV2) \ FUNCTION_DATEV2_WITH_TWO_ARGS(NAME, IMPL, DataTypeDateV2, DataTypeDateV2) - +// these diff functions accept all v2 types. but for v1 only datetime. ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2DateDiff, DateDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2TimeDiff, TimeDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2YearsDiff, YearsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MonthsDiff, MonthsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2WeeksDiff, WeeksDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2HoursDiff, HoursDiffImpl) -ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MinutesDiff, MintueSDiffImpl) +ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MinutesDiff, MintuesDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2SecondsDiff, SecondsDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2DaysDiff, DaysDiffImpl) ALL_FUNCTION_DATEV2_WITH_TWO_ARGS(FunctionDatetimeV2MilliSecondsDiff, MilliSecondsDiffImpl) diff --git a/be/src/vec/functions/function_helpers.h b/be/src/vec/functions/function_helpers.h index 8c7eec28fe2f6f..818badeee4551b 100644 --- a/be/src/vec/functions/function_helpers.h +++ b/be/src/vec/functions/function_helpers.h @@ -20,10 +20,8 @@ #pragma once -#include - +#include #include -#include #include "vec/columns/column.h" #include "vec/columns/column_const.h" @@ -53,11 +51,15 @@ const Type* check_and_get_data_type(const IDataType* data_type) { template const ColumnConst* check_and_get_column_const(const IColumn* column) { - if (!column || 
!is_column_const(*column)) return {}; + if (!column || !is_column_const(*column)) { + return nullptr; + } - const ColumnConst* res = assert_cast(column); + const auto* res = assert_cast(column); - if (!check_column(&res->get_data_column())) return {}; + if (!check_column(&res->get_data_column())) { + return nullptr; + } return res; } @@ -66,7 +68,9 @@ template const Type* check_and_get_column_constData(const IColumn* column) { const ColumnConst* res = check_and_get_column_const(column); - if (!res) return {}; + if (!res) { + return nullptr; + } return static_cast(&res->get_data_column()); } diff --git a/be/test/vec/function/function_test_util.h b/be/test/vec/function/function_test_util.h index a3809bf8ec6a48..1c4c0906b80d3e 100644 --- a/be/test/vec/function/function_test_util.h +++ b/be/test/vec/function/function_test_util.h @@ -69,7 +69,7 @@ using Row = std::pair; using DataSet = std::vector; using InputTypeSet = std::vector; -// FIXME: should use exception or expected to deal null value.w +// FIXME: should use exception or expected to deal null value. int64_t str_to_date_time(std::string datetime_str, bool data_time = true); uint32_t str_to_date_v2(std::string datetime_str, std::string datetime_format); uint64_t str_to_datetime_v2(std::string datetime_str, std::string datetime_format); diff --git a/be/test/vec/function/function_time_test.cpp b/be/test/vec/function/function_time_test.cpp index a4299de3557608..ddfc722c7ab452 100644 --- a/be/test/vec/function/function_time_test.cpp +++ b/be/test/vec/function/function_time_test.cpp @@ -15,6 +15,7 @@ // specific language governing permissions and limitations // under the License. 
+#include #include #include @@ -299,14 +300,22 @@ TEST(VTimestampFunctionsTest, years_add_test) { InputTypeSet input_types = {TypeIndex::DateTime, TypeIndex::Int32}; - DataSet data_set = { - {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2025-05-23 00:00:00")}, - {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2015-05-23 00:00:00")}, - {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:00"), 8000}, Null()}, - {{Null(), 5}, Null()}}; + { + DataSet data_set = { + {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2025-05-23 00:00:00")}, + {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2015-05-23 00:00:00")}, + {{std::string(""), 5}, Null()}, + {{Null(), 5}, Null()}}; - static_cast(check_function(func_name, input_types, data_set)); + static_cast(check_function(func_name, input_types, data_set)); + } + + { + DataSet data_set = {{{std::string("2020-05-23 00:00:00"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, years_sub_test) { @@ -314,14 +323,22 @@ TEST(VTimestampFunctionsTest, years_sub_test) { InputTypeSet input_types = {TypeIndex::DateTime, TypeIndex::Int32}; - DataSet data_set = { - {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2015-05-23 00:00:00")}, - {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2025-05-23 00:00:00")}, - {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:00"), 3000}, Null()}, - {{Null(), 5}, Null()}}; + { + DataSet data_set = { + {{std::string("2020-05-23 00:00:00"), 5}, str_to_date_time("2015-05-23 00:00:00")}, + {{std::string("2020-05-23 00:00:00"), -5}, str_to_date_time("2025-05-23 00:00:00")}, + {{std::string(""), 5}, Null()}, + {{Null(), 5}, Null()}}; - static_cast(check_function(func_name, input_types, data_set)); + static_cast(check_function(func_name, input_types, data_set)); + } + + { + DataSet data_set = {{{std::string("2020-05-23 
00:00:00"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, months_add_test) { @@ -1043,11 +1060,18 @@ TEST(VTimestampFunctionsTest, years_add_v2_test) { {{std::string("2020-05-23"), 5}, str_to_date_v2("2025-05-23", "%Y-%m-%d")}, {{std::string("2020-05-23"), -5}, str_to_date_v2("2015-05-23", "%Y-%m-%d")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23"), 8000}, Null()}, {{Null(), 5}, Null()}}; static_cast(check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } { InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; @@ -1057,12 +1081,19 @@ TEST(VTimestampFunctionsTest, years_add_v2_test) { {{std::string("2020-05-23 00:00:11.123"), -5}, str_to_datetime_v2("2015-05-23 00:00:11.123", "%Y-%m-%d %H:%i:%s.%f")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:11.123"), 8000}, Null()}, {{Null(), 5}, Null()}}; static_cast( check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23 00:00:11.123"), 8000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, years_sub_v2_test) { @@ -1075,11 +1106,19 @@ TEST(VTimestampFunctionsTest, years_sub_v2_test) { {{std::string("2020-05-23"), 5}, str_to_date_v2("2015-05-23", "%Y-%m-%d")}, {{std::string("2020-05-23"), -5}, str_to_date_v2("2025-05-23", "%Y-%m-%d")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23"), 3000}, Null()}, {{Null(), 5}, Null()}}; static_cast(check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = 
{TypeIndex::DateV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } + { InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; @@ -1088,12 +1127,19 @@ TEST(VTimestampFunctionsTest, years_sub_v2_test) { {{std::string("2020-05-23 00:00:11.123"), -5}, str_to_datetime_v2("2025-05-23 00:00:11.123", "%Y-%m-%d %H:%i:%s.%f")}, {{std::string(""), 5}, Null()}, - {{std::string("2020-05-23 00:00:11.123"), 3000}, Null()}, {{Null(), 5}, Null()}}; static_cast( check_function(func_name, input_types, data_set)); } + { + InputTypeSet input_types = {TypeIndex::DateTimeV2, TypeIndex::Int32}; + + DataSet data_set = {{{std::string("2020-05-23 00:00:11.123"), 3000}, Null()}}; + + EXPECT_ANY_THROW(static_cast( + check_function(func_name, input_types, data_set))); + } } TEST(VTimestampFunctionsTest, months_add_v2_test) { diff --git a/regression-test/suites/correctness/test_date_function_const.groovy b/regression-test/suites/correctness/test_date_function_const.groovy index d1ba4db4e68987..e9bf11bd24ebd6 100644 --- a/regression-test/suites/correctness/test_date_function_const.groovy +++ b/regression-test/suites/correctness/test_date_function_const.groovy @@ -61,6 +61,6 @@ suite("test_date_function_const") { test { sql """select date_add("1900-01-01 12:00:00.123456", interval 10000000000 month);""" - exception "Operation months_add 133705200962757184 1410065408 out of range" + exception "Operation months_add of 1900-01-01 12:00:00.123456, 1410065408 out of range" } } diff --git a/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy b/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy index 282a28a903e4a0..53b7385b1535df 100644 --- 
a/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy +++ b/regression-test/suites/nereids_p0/sql_functions/datetime_functions/test_date_or_datetime_computation_negative.groovy @@ -14,6 +14,7 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. + suite("test_date_or_datetime_computation_negative") { sql """ CREATE TABLE IF NOT EXISTS test_date_or_datetime_computation_negative ( `row_id` LARGEINT NOT NULL, @@ -50,8 +51,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 year) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 year), date_sub(dateV2_null, interval 1 year), date_sub(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_1 """SELECT date_sub(date_null, interval 1 year), date_sub(dateV2_null, interval 1 year), date_sub(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_sub(date, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -65,8 +69,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 month), date_sub(dateV2_null, interval 1 month), date_sub(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + 
assertTrue (exception != null)} } - qt_select_nullable_2 """SELECT date_sub(date_null, interval 1 month), date_sub(dateV2_null, interval 1 month), date_sub(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """ SELECT date_sub(date, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -80,10 +87,12 @@ suite("test_date_or_datetime_computation_negative") { sql """ SELECT date_sub(datetime, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=1; """ check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 week), date_sub(dateV2_null, interval 1 week), date_sub(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_3 """SELECT date_sub(date_null, interval 1 week), date_sub(dateV2_null, interval 1 week), date_sub(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_sub(date, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> @@ -96,10 +105,12 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 day), date_sub(dateV2_null, interval 1 day), date_sub(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_4 """SELECT date_sub(date_null, interval 1 day), date_sub(dateV2_null, interval 1 day), 
date_sub(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_sub(date, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> @@ -112,8 +123,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """ SELECT date_sub(date_null, interval 1 hour), date_sub(dateV2_null, interval 1 hour), date_sub(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_5 """ SELECT date_sub(date_null, interval 1 hour), date_sub(dateV2_null, interval 1 hour), date_sub(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_sub(date, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -127,8 +141,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 minute), date_sub(dateV2_null, interval 1 minute), date_sub(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_6 """SELECT date_sub(date_null, interval 1 minute), date_sub(dateV2_null, interval 1 minute), date_sub(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql 
"""SELECT date_sub(date, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" @@ -142,8 +159,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_sub(datetime, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=1;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_sub(date_null, interval 1 second), date_sub(dateV2_null, interval 1 second), date_sub(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_7 """SELECT date_sub(date_null, interval 1 second), date_sub(dateV2_null, interval 1 second), date_sub(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { @@ -158,8 +178,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 year) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 year), date_add(dateV2_null, interval 1 year), date_add(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_8 """SELECT date_add(date_null, interval 1 year), date_add(dateV2_null, interval 1 year), date_add(datetime_null, interval 1 year) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -173,8 +196,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 month) FROM test_date_or_datetime_computation_negative WHERE 
row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 month), date_add(dateV2_null, interval 1 month), date_add(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_9 """SELECT date_add(date_null, interval 1 month), date_add(dateV2_null, interval 1 month), date_add(datetime_null, interval 1 month) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """ SELECT date_add(date, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -188,10 +214,12 @@ suite("test_date_or_datetime_computation_negative") { sql """ SELECT date_add(datetime, interval 1 week) FROM test_date_or_datetime_computation_negative WHERE row_id=3; """ check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 week), date_add(dateV2_null, interval 1 week), date_add(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_10 """SELECT date_add(date_null, interval 1 week), date_add(dateV2_null, interval 1 week), date_add(datetime_null, interval 1 week) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_add(date, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> @@ -204,10 +232,12 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 day) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, 
interval 1 day), date_add(dateV2_null, interval 1 day), date_add(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_11 """SELECT date_add(date_null, interval 1 day), date_add(dateV2_null, interval 1 day), date_add(datetime_null, interval 1 day) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - test { sql """SELECT date_add(date, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> @@ -220,8 +250,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 hour) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """ SELECT date_add(date_null, interval 1 hour), date_add(dateV2_null, interval 1 hour), date_add(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_12 """ SELECT date_add(date_null, interval 1 hour), date_add(dateV2_null, interval 1 hour), date_add(datetime_null, interval 1 hour) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -235,8 +268,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 minute) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 minute), date_add(dateV2_null, interval 1 minute), date_add(datetime_null, interval 1 minute) FROM 
test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_13 """SELECT date_add(date_null, interval 1 minute), date_add(dateV2_null, interval 1 minute), date_add(datetime_null, interval 1 minute) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" test { sql """SELECT date_add(date, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" @@ -250,8 +286,11 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT date_add(datetime, interval 1 second) FROM test_date_or_datetime_computation_negative WHERE row_id=3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT date_add(date_null, interval 1 second), date_add(dateV2_null, interval 1 second), date_add(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_14 """SELECT date_add(date_null, interval 1 second), date_add(dateV2_null, interval 1 second), date_add(datetime_null, interval 1 second) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" // TODO: // nagetive test for microseconds_add/milliseconds_add/seconds_add/minutes_add/hours_add/days_add/weeks_add/months_add/years_add @@ -268,8 +307,9 @@ suite("test_date_or_datetime_computation_negative") { sql """SELECT hours_add(datetime, 24) FROM test_date_or_datetime_computation_negative WHERE row_id = 3;""" check {result, exception, startTime, endTime -> assertTrue (exception != null)} + + sql """SELECT hours_add(date_null, 24), hours_add(dateV2_null, 24), hours_add(datetime_null, 24) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" + check {result, exception, startTime, endTime -> + assertTrue (exception != null)} } - qt_select_nullable_15 """SELECT 
hours_add(date_null, 24), hours_add(dateV2_null, 24), hours_add(datetime_null, 24) FROM test_date_or_datetime_computation_negative ORDER BY row_id;""" - - sql "DROP TABLE test_date_or_datetime_computation_negative" } From cd916c6a7cf6b7df2fd910bc6f5d123416715b36 Mon Sep 17 00:00:00 2001 From: Siyang Tang Date: Mon, 23 Dec 2024 12:25:46 +0800 Subject: [PATCH 50/82] [enhancement](tablet-meta) Avoid be coredump due to potential race condition when updating tablet cumu point (#45643) Currently, when setting tablet's cumu point, an assert failure will happen if the new point is less than the local value, resulting in a BE coredump. This could happen when a race condition happens: 1. thread A try to sync rowset 2. thread A fetch cumu point from ms 3. thread B update cumu point(like sc/compaction), commit to ms after 2. and set be tablet cumu point before 4. 4. thread A try to set cumu point seen before and meet the assertion, coredump. --- be/src/cloud/cloud_tablet.cpp | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/be/src/cloud/cloud_tablet.cpp b/be/src/cloud/cloud_tablet.cpp index c7d3170726b2d5..4e351f7cfa5110 100644 --- a/be/src/cloud/cloud_tablet.cpp +++ b/be/src/cloud/cloud_tablet.cpp @@ -33,6 +33,7 @@ #include "cloud/cloud_meta_mgr.h" #include "cloud/cloud_storage_engine.h" #include "cloud/cloud_tablet_mgr.h" +#include "common/logging.h" #include "io/cache/block_file_cache_downloader.h" #include "io/cache/block_file_cache_factory.h" #include "olap/cumulative_compaction_time_series_policy.h" @@ -657,11 +658,14 @@ void CloudTablet::get_compaction_status(std::string* json_result) { } void CloudTablet::set_cumulative_layer_point(int64_t new_point) { + if (new_point == Tablet::K_INVALID_CUMULATIVE_POINT || new_point >= _cumulative_point) { + _cumulative_point = new_point; + return; + } // cumulative point should only be reset to -1, or be increased - CHECK(new_point == Tablet::K_INVALID_CUMULATIVE_POINT || new_point >= _cumulative_point) << "Unexpected
cumulative point: " << new_point - << ", origin: " << _cumulative_point.load(); - _cumulative_point = new_point; + // FIXME: could happen in currently unresolved race conditions + LOG(WARNING) << "Unexpected cumulative point: " << new_point + << ", origin: " << _cumulative_point.load(); } std::vector CloudTablet::pick_candidate_rowsets_to_base_compaction() { From cccc9bb8beddb271537d4434cab20dc1ad16dde8 Mon Sep 17 00:00:00 2001 From: "Mingyu Chen (Rayner)" Date: Mon, 23 Dec 2024 13:53:27 +0800 Subject: [PATCH 51/82] [opt](log) add more info in nereids timeout log (#45705) ### What problem does this PR solve? Add elapsed time in log --- .../doris/nereids/jobs/scheduler/SimpleJobScheduler.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java index e31c92e3fe004c..1354f895a3c07a 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/jobs/scheduler/SimpleJobScheduler.java @@ -34,11 +34,11 @@ public void executeJobPool(ScheduleContext scheduleContext) { CascadesContext context = (CascadesContext) scheduleContext; SessionVariable sessionVariable = context.getConnectContext().getSessionVariable(); while (!pool.isEmpty()) { + long elapsedS = context.getStatementContext().getStopwatch().elapsed(TimeUnit.MILLISECONDS) / 1000; if (sessionVariable.enableNereidsTimeout - && context.getStatementContext().getStopwatch().elapsed(TimeUnit.MILLISECONDS) - > sessionVariable.nereidsTimeoutSecond * 1000L) { - throw new AnalysisException( - "Nereids cost too much time ( > " + sessionVariable.nereidsTimeoutSecond + "s )"); + && elapsedS > sessionVariable.nereidsTimeoutSecond) { + throw new AnalysisException(String.format("Nereids cost too much time ( %ds > %ds", + elapsedS, 
sessionVariable.nereidsTimeoutSecond)); } Job job = pool.pop(); job.execute(); From a032ece525c1b4bc5e15826e6f54fc82d5ccbee9 Mon Sep 17 00:00:00 2001 From: zzzxl Date: Mon, 23 Dec 2024 14:52:17 +0800 Subject: [PATCH 52/82] [opt](inverted index) Add Inverted Index Cache Toggle (#45718) Problem Summary: 1. Adding an inverted index cache toggle can help with debugging. --- .../segment_v2/inverted_index_reader.cpp | 74 ++++++--- .../rowset/segment_v2/inverted_index_reader.h | 38 +++-- .../org/apache/doris/qe/SessionVariable.java | 16 ++ gensrc/thrift/PaloInternalService.thrift | 4 + .../test_inverted_index_cache.out | 22 +++ .../test_inverted_index_cache.groovy | 144 ++++++++++++++++++ 6 files changed, 260 insertions(+), 38 deletions(-) create mode 100644 regression-test/data/fault_injection_p0/test_inverted_index_cache.out create mode 100644 regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy diff --git a/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp b/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp index b40f9121125207..9790d7273e1bff 100644 --- a/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp +++ b/be/src/olap/rowset/segment_v2/inverted_index_reader.cpp @@ -164,16 +164,48 @@ Status InvertedIndexReader::read_null_bitmap(const io::IOContext* io_ctx, return Status::OK(); } +Status InvertedIndexReader::handle_query_cache(RuntimeState* runtime_state, + InvertedIndexQueryCache* cache, + const InvertedIndexQueryCache::CacheKey& cache_key, + InvertedIndexQueryCacheHandle* cache_handler, + OlapReaderStatistics* stats, + std::shared_ptr& bit_map) { + const auto& query_options = runtime_state->query_options(); + if (query_options.enable_inverted_index_query_cache && + cache->lookup(cache_key, cache_handler)) { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_query_cache_hit", { + return Status::Error("handle query cache hit"); + }); + stats->inverted_index_query_cache_hit++; + 
SCOPED_RAW_TIMER(&stats->inverted_index_query_bitmap_copy_timer); + bit_map = cache_handler->get_bitmap(); + return Status::OK(); + } + DBUG_EXECUTE_IF("InvertedIndexReader.handle_query_cache_miss", { + return Status::Error("handle query cache miss"); + }); + stats->inverted_index_query_cache_miss++; + return Status::Error("cache miss"); +} + Status InvertedIndexReader::handle_searcher_cache( - InvertedIndexCacheHandle* inverted_index_cache_handle, const io::IOContext* io_ctx, - OlapReaderStatistics* stats) { + RuntimeState* runtime_state, InvertedIndexCacheHandle* inverted_index_cache_handle, + const io::IOContext* io_ctx, OlapReaderStatistics* stats) { auto index_file_key = _inverted_index_file_reader->get_index_file_cache_key(&_index_meta); InvertedIndexSearcherCache::CacheKey searcher_cache_key(index_file_key); - if (InvertedIndexSearcherCache::instance()->lookup(searcher_cache_key, + const auto& query_options = runtime_state->query_options(); + if (query_options.enable_inverted_index_searcher_cache && + InvertedIndexSearcherCache::instance()->lookup(searcher_cache_key, inverted_index_cache_handle)) { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_searcher_cache_hit", { + return Status::Error("handle searcher cache hit"); + }); stats->inverted_index_searcher_cache_hit++; return Status::OK(); } else { + DBUG_EXECUTE_IF("InvertedIndexReader.handle_searcher_cache_miss", { + return Status::Error("handle searcher cache miss"); + }); // searcher cache miss stats->inverted_index_searcher_cache_miss++; auto mem_tracker = std::make_unique("InvertedIndexSearcherCacheWithRead"); @@ -311,14 +343,16 @@ Status FullTextIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatist InvertedIndexQueryCacheHandle cache_handler; std::shared_ptr term_match_bitmap = nullptr; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if 
(cache_status.ok()) { return Status::OK(); } FulltextIndexSearcherPtr* searcher_ptr = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); searcher_ptr = std::get_if(&searcher_variant); if (searcher_ptr != nullptr) { @@ -379,7 +413,8 @@ Status StringTypeInvertedIndexReader::query(const io::IOContext* io_ctx, search_str}; auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { return Status::OK(); } @@ -393,7 +428,8 @@ Status StringTypeInvertedIndexReader::query(const io::IOContext* io_ctx, auto result = std::make_shared(); FulltextIndexSearcherPtr* searcher_ptr = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); searcher_ptr = std::get_if(&searcher_variant); if (searcher_ptr != nullptr) { @@ -609,11 +645,12 @@ Status BkdIndexReader::invoke_bkd_query(const void* query_value, InvertedIndexQu } Status BkdIndexReader::try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) { try { std::shared_ptr 
r; - auto st = get_bkd_reader(r, io_ctx, stats); + auto st = get_bkd_reader(r, io_ctx, stats, runtime_state); if (!st.ok()) { LOG(WARNING) << "get bkd reader for " << _inverted_index_file_reader->get_index_file_path(&_index_meta) @@ -629,7 +666,8 @@ Status BkdIndexReader::try_query(const io::IOContext* io_ctx, OlapReaderStatisti auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; std::shared_ptr bit_map; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { *count = bit_map->cardinality(); return Status::OK(); @@ -653,7 +691,7 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* try { std::shared_ptr r; - auto st = get_bkd_reader(r, io_ctx, stats); + auto st = get_bkd_reader(r, io_ctx, stats, runtime_state); if (!st.ok()) { LOG(WARNING) << "get bkd reader for " << _inverted_index_file_reader->get_index_file_path(&_index_meta) @@ -668,7 +706,8 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* query_str}; auto* cache = InvertedIndexQueryCache::instance(); InvertedIndexQueryCacheHandle cache_handler; - auto cache_status = handle_query_cache(cache, cache_key, &cache_handler, stats, bit_map); + auto cache_status = + handle_query_cache(runtime_state, cache, cache_key, &cache_handler, stats, bit_map); if (cache_status.ok()) { return Status::OK(); } @@ -690,10 +729,11 @@ Status BkdIndexReader::query(const io::IOContext* io_ctx, OlapReaderStatistics* } Status BkdIndexReader::get_bkd_reader(BKDIndexSearcherPtr& bkd_reader, const io::IOContext* io_ctx, - OlapReaderStatistics* stats) { + OlapReaderStatistics* stats, RuntimeState* runtime_state) { BKDIndexSearcherPtr* bkd_searcher = nullptr; InvertedIndexCacheHandle inverted_index_cache_handle; - 
RETURN_IF_ERROR(handle_searcher_cache(&inverted_index_cache_handle, io_ctx, stats)); + RETURN_IF_ERROR( + handle_searcher_cache(runtime_state, &inverted_index_cache_handle, io_ctx, stats)); auto searcher_variant = inverted_index_cache_handle.get_index_searcher(); bkd_searcher = std::get_if(&searcher_variant); if (bkd_searcher) { @@ -1138,8 +1178,8 @@ Status InvertedIndexIterator::try_read_from_inverted_index(const std::string& co query_type == InvertedIndexQueryType::LESS_EQUAL_QUERY || query_type == InvertedIndexQueryType::LESS_THAN_QUERY || query_type == InvertedIndexQueryType::EQUAL_QUERY) { - RETURN_IF_ERROR( - _reader->try_query(&_io_ctx, _stats, column_name, query_value, query_type, count)); + RETURN_IF_ERROR(_reader->try_query(&_io_ctx, _stats, _runtime_state, column_name, + query_value, query_type, count)); } return Status::OK(); } diff --git a/be/src/olap/rowset/segment_v2/inverted_index_reader.h b/be/src/olap/rowset/segment_v2/inverted_index_reader.h index a1445603286619..bbd148fae5250d 100644 --- a/be/src/olap/rowset/segment_v2/inverted_index_reader.h +++ b/be/src/olap/rowset/segment_v2/inverted_index_reader.h @@ -190,8 +190,9 @@ class InvertedIndexReader : public std::enable_shared_from_this& bit_map) = 0; virtual Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) = 0; + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) = 0; Status read_null_bitmap(const io::IOContext* io_ctx, OlapReaderStatistics* stats, InvertedIndexQueryCacheHandle* cache_handle, @@ -208,22 +209,14 @@ class InvertedIndexReader : public std::enable_shared_from_this& bit_map) { - if (cache->lookup(cache_key, cache_handler)) { - stats->inverted_index_query_cache_hit++; - SCOPED_RAW_TIMER(&stats->inverted_index_query_bitmap_copy_timer); - bit_map = 
cache_handler->get_bitmap(); - return Status::OK(); - } - stats->inverted_index_query_cache_miss++; - return Status::Error("cache miss"); - } + std::shared_ptr& bit_map); - virtual Status handle_searcher_cache(InvertedIndexCacheHandle* inverted_index_cache_handle, + virtual Status handle_searcher_cache(RuntimeState* runtime_state, + InvertedIndexCacheHandle* inverted_index_cache_handle, const io::IOContext* io_ctx, OlapReaderStatistics* stats); std::string get_index_file_path(); static Status create_index_searcher(lucene::store::Directory* dir, IndexSearcherPtr* searcher, @@ -262,8 +255,9 @@ class FullTextIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) override { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override { return Status::Error( "FullTextIndexReader not support try_query"); } @@ -289,8 +283,9 @@ class StringTypeInvertedIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) override { + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override { return Status::Error( "StringTypeInvertedIndexReader not support try_query"); } @@ -350,8 +345,9 @@ class BkdIndexReader : public InvertedIndexReader { const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr& bit_map) override; Status try_query(const io::IOContext* 
io_ctx, OlapReaderStatistics* stats, - const std::string& column_name, const void* query_value, - InvertedIndexQueryType query_type, uint32_t* count) override; + RuntimeState* runtime_state, const std::string& column_name, + const void* query_value, InvertedIndexQueryType query_type, + uint32_t* count) override; Status invoke_bkd_try_query(const void* query_value, InvertedIndexQueryType query_type, std::shared_ptr r, uint32_t* count); Status invoke_bkd_query(const void* query_value, InvertedIndexQueryType query_type, @@ -364,7 +360,7 @@ class BkdIndexReader : public InvertedIndexReader { InvertedIndexReaderType type() override; Status get_bkd_reader(BKDIndexSearcherPtr& reader, const io::IOContext* io_ctx, - OlapReaderStatistics* stats); + OlapReaderStatistics* stats, RuntimeState* runtime_state); private: const TypeInfo* _type_info {}; diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index 75f21c786b8c37..cf26cce7383e1c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -679,6 +679,8 @@ public class SessionVariable implements Serializable, Writable { public static final String ENABLE_MATCH_WITHOUT_INVERTED_INDEX = "enable_match_without_inverted_index"; public static final String ENABLE_FALLBACK_ON_MISSING_INVERTED_INDEX = "enable_fallback_on_missing_inverted_index"; + public static final String ENABLE_INVERTED_INDEX_SEARCHER_CACHE = "enable_inverted_index_searcher_cache"; + public static final String ENABLE_INVERTED_INDEX_QUERY_CACHE = "enable_inverted_index_query_cache"; public static final String IN_LIST_VALUE_COUNT_THRESHOLD = "in_list_value_count_threshold"; @@ -2304,6 +2306,18 @@ public void setIgnoreShapePlanNodes(String ignoreShapePlanNodes) { }) public boolean enableFallbackOnMissingInvertedIndex = true; + @VariableMgr.VarAttr(name = 
ENABLE_INVERTED_INDEX_SEARCHER_CACHE, description = { + "开启后会缓存倒排索引searcher", + "Enabling this will cache the inverted index searcher." + }) + public boolean enableInvertedIndexSearcherCache = true; + + @VariableMgr.VarAttr(name = ENABLE_INVERTED_INDEX_QUERY_CACHE, description = { + "开启后会缓存倒排索引查询结果", + "Enabling this will cache the results of inverted index queries." + }) + public boolean enableInvertedIndexQueryCache = true; + @VariableMgr.VarAttr(name = IN_LIST_VALUE_COUNT_THRESHOLD, description = { "in条件value数量大于这个threshold后将不会走fast_execute", "When the number of values in the IN condition exceeds this threshold," @@ -3990,6 +4004,8 @@ public TQueryOptions toThrift() { tResult.setEnableMatchWithoutInvertedIndex(enableMatchWithoutInvertedIndex); tResult.setEnableFallbackOnMissingInvertedIndex(enableFallbackOnMissingInvertedIndex); + tResult.setEnableInvertedIndexSearcherCache(enableInvertedIndexSearcherCache); + tResult.setEnableInvertedIndexQueryCache(enableInvertedIndexQueryCache); tResult.setHiveOrcUseColumnNames(hiveOrcUseColumnNames); tResult.setHiveParquetUseColumnNames(hiveParquetUseColumnNames); tResult.setKeepCarriageReturn(keepCarriageReturn); diff --git a/gensrc/thrift/PaloInternalService.thrift b/gensrc/thrift/PaloInternalService.thrift index 0a1ea4a98fca94..f4d367659e4892 100644 --- a/gensrc/thrift/PaloInternalService.thrift +++ b/gensrc/thrift/PaloInternalService.thrift @@ -360,6 +360,10 @@ struct TQueryOptions { 141: optional bool ignore_runtime_filter_error = false; 142: optional bool enable_fixed_len_to_uint32_v2 = false; 143: optional bool enable_shared_exchange_sink_buffer = true; + + 144: optional bool enable_inverted_index_searcher_cache = true; + 145: optional bool enable_inverted_index_query_cache = true; + // For cloud, to control if the content would be written into file cache // In write path, to control if the content would be written into file cache. // In read path, read from file cache or remote storage when execute query. 
diff --git a/regression-test/data/fault_injection_p0/test_inverted_index_cache.out b/regression-test/data/fault_injection_p0/test_inverted_index_cache.out new file mode 100644 index 00000000000000..7d166b8b78d5d3 --- /dev/null +++ b/regression-test/data/fault_injection_p0/test_inverted_index_cache.out @@ -0,0 +1,22 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +863 + +-- !sql -- +350 + +-- !sql -- +863 + +-- !sql -- +350 + diff --git a/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy b/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy new file mode 100644 index 00000000000000..fd250a7d4fd528 --- /dev/null +++ b/regression-test/suites/fault_injection_p0/test_inverted_index_cache.groovy @@ -0,0 +1,144 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_inverted_index_cache", "nonConcurrent") { + // define a sql table + def indexTbName = "test_inverted_index_cache" + + sql "DROP TABLE IF EXISTS ${indexTbName}" + sql """ + CREATE TABLE ${indexTbName} ( + `@timestamp` int(11) NULL COMMENT "", + `clientip` varchar(20) NULL COMMENT "", + `request` text NULL COMMENT "", + `status` int(11) NULL COMMENT "", + `size` int(11) NULL COMMENT "", + INDEX request_idx (`request`) USING INVERTED PROPERTIES("parser" = "english", "support_phrase" = "true") COMMENT '', + ) ENGINE=OLAP + DUPLICATE KEY(`@timestamp`) + COMMENT "OLAP" + DISTRIBUTED BY RANDOM BUCKETS 1 + PROPERTIES ( + "replication_allocation" = "tag.location.default: 1", + "disable_auto_compaction" = "true" + ); + """ + + def load_httplogs_data = {table_name, label, read_flag, format_flag, file_name, ignore_failure=false, + expected_succ_rows = -1, load_to_single_tablet = 'true' -> + + // load the json data + streamLoad { + table "${table_name}" + + // set http request header params + set 'label', label + "_" + UUID.randomUUID().toString() + set 'read_json_by_line', read_flag + set 'format', format_flag + file file_name // import json file + time 10000 // limit inflight 10s + if (expected_succ_rows >= 0) { + set 'max_filter_ratio', '1' + } + + // if declared a check callback, the default check condition will ignore. 
+ // So you must check all condition + check { result, exception, startTime, endTime -> + if (ignore_failure && expected_succ_rows < 0) { return } + if (exception != null) { + throw exception + } + log.info("Stream load result: ${result}".toString()) + def json = parseJson(result) + } + } + } + + load_httplogs_data.call(indexTbName, 'test_index_inlist_fault_injection', 'true', 'json', 'documents-1000.json') + sql "sync" + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + // query cache hit + // searcher cache hit + try { + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = true """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + } + + // query cache miss + // searcher cache hit + try { + sql """ set enable_inverted_index_query_cache = false """ + sql """ set enable_inverted_index_searcher_cache = true """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_miss") + } + + // query cache hit + // searcher cache miss + try { + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = false """ 
+ + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + qt_sql """ select count() from ${indexTbName} where (request match 'english'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_miss") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + } + + // query cache miss + // searcher cache miss + try { + sql """ set enable_inverted_index_query_cache = false """ + sql """ set enable_inverted_index_searcher_cache = false """ + + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().enableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + + qt_sql """ select count() from ${indexTbName} where (request match 'images'); """ + qt_sql """ select count() from ${indexTbName} where (request match 'english'); """ + + } finally { + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_query_cache_hit") + GetDebugPoint().disableDebugPointForAllBEs("InvertedIndexReader.handle_searcher_cache_hit") + } + + sql """ set enable_inverted_index_query_cache = true """ + sql """ set enable_inverted_index_searcher_cache = true """ +} \ No newline at end of file From e09bc04bce094e16481c55e73294b03995761de8 Mon Sep 17 00:00:00 2001 From: yujun Date: Mon, 23 Dec 2024 16:08:35 +0800 Subject: [PATCH 53/82] [test](nereids) add test simplify comparison predicate (#44886) ### What problem does this PR solve? 
Add test simplify comparison predicate --- .../SimplifyComparisonPredicateTest.java | 191 +++++++++++++++++- .../test_simplify_comparison_predicate.groovy | 170 ++++++++++++++++ 2 files changed, 360 insertions(+), 1 deletion(-) create mode 100644 regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java index bab3b4385137e8..9a36fb59b9f18d 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateTest.java @@ -40,6 +40,7 @@ import org.apache.doris.nereids.trees.expressions.literal.DateV2Literal; import org.apache.doris.nereids.trees.expressions.literal.DecimalV3Literal; import org.apache.doris.nereids.trees.expressions.literal.DoubleLiteral; +import org.apache.doris.nereids.trees.expressions.literal.FloatLiteral; import org.apache.doris.nereids.trees.expressions.literal.IntegerLiteral; import org.apache.doris.nereids.trees.expressions.literal.LargeIntLiteral; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; @@ -54,6 +55,7 @@ import org.apache.doris.nereids.types.DateV2Type; import org.apache.doris.nereids.types.DecimalV3Type; import org.apache.doris.nereids.types.DoubleType; +import org.apache.doris.nereids.types.FloatType; import org.apache.doris.nereids.types.IntegerType; import org.apache.doris.nereids.types.SmallIntType; import org.apache.doris.nereids.types.TinyIntType; @@ -296,10 +298,197 @@ void testDoubleLiteral() { Expression rewrittenExpression = executor.rewrite(expression, context); Assertions.assertEquals(left.child(0).getDataType(), rewrittenExpression.child(1).getDataType()); 
Assertions.assertEquals(rewrittenExpression.child(0).getDataType(), rewrittenExpression.child(1).getDataType()); + + Expression tinyIntSlot = new SlotReference("a", TinyIntType.INSTANCE); + Expression smallIntSlot = new SlotReference("a", SmallIntType.INSTANCE); + Expression intSlot = new SlotReference("a", IntegerType.INSTANCE); + Expression bigIntSlot = new SlotReference("a", BigIntType.INSTANCE); + + // tiny int, literal not exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 12))); + + // tiny int, literal exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, FloatType.INSTANCE), new FloatLiteral(200.0f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new 
DoubleLiteral(200.0f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.trueOrNull(tinyIntSlot)); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DoubleType.INSTANCE), new DoubleLiteral(200.3f)), + ExpressionUtils.trueOrNull(tinyIntSlot)); + + // small int + assertRewrite(new EqualTo(new Cast(smallIntSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(smallIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new GreaterThanEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThan(new Cast(smallIntSlot, DoubleType.INSTANCE), new 
DoubleLiteral(12.3f)), + new LessThan(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThanEqual(new Cast(smallIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(smallIntSlot, new SmallIntLiteral((short) 12))); + + // int + assertRewrite(new EqualTo(new Cast(intSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(intSlot)); + assertRewrite(new NullSafeEqual(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(intSlot, new IntegerLiteral(12))); + assertRewrite(new GreaterThanEqual(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThan(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThanEqual(new Cast(intSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(intSlot, new IntegerLiteral(12))); + + // big int + assertRewrite(new EqualTo(new Cast(bigIntSlot, FloatType.INSTANCE), new FloatLiteral(12.0f)), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.0f)), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + ExpressionUtils.falseOrNull(bigIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + 
BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThan(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new GreaterThanEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new GreaterThanEqual(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThan(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThan(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThanEqual(new Cast(bigIntSlot, DoubleType.INSTANCE), new DoubleLiteral(12.3f)), + new LessThanEqual(bigIntSlot, new BigIntLiteral(12L))); + } + + @Test + void testIntCmpDecimalV3Literal() { + executor = new ExpressionRuleExecutor(ImmutableList.of( + bottomUp(SimplifyComparisonPredicate.INSTANCE) + )); + + Expression tinyIntSlot = new SlotReference("a", TinyIntType.INSTANCE); + Expression smallIntSlot = new SlotReference("a", SmallIntType.INSTANCE); + Expression intSlot = new SlotReference("a", IntegerType.INSTANCE); + Expression bigIntSlot = new SlotReference("a", BigIntType.INSTANCE); + + // tiny int, literal not exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(tinyIntSlot, new TinyIntLiteral((byte) 12))); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, 
DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(tinyIntSlot, new TinyIntLiteral((byte) 13))); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(tinyIntSlot, new TinyIntLiteral((byte) 12))); + + // tiny int, literal exceeds data type limit + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.0"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new EqualTo(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new GreaterThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.falseOrNull(tinyIntSlot)); + assertRewrite(new LessThan(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.trueOrNull(tinyIntSlot)); + assertRewrite(new LessThanEqual(new Cast(tinyIntSlot, DecimalV3Type.createDecimalV3Type(4, 1)), new DecimalV3Literal(new BigDecimal("200.3"))), + ExpressionUtils.trueOrNull(tinyIntSlot)); + + // small int + assertRewrite(new EqualTo(new Cast(smallIntSlot, 
DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new EqualTo(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(smallIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(smallIntSlot, new SmallIntLiteral((short) 12))); + assertRewrite(new GreaterThanEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThan(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(smallIntSlot, new SmallIntLiteral((short) 13))); + assertRewrite(new LessThanEqual(new Cast(smallIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(smallIntSlot, new SmallIntLiteral((short) 12))); + + // int + assertRewrite(new EqualTo(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(intSlot, new IntegerLiteral(12))); + assertRewrite(new EqualTo(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(intSlot)); + assertRewrite(new NullSafeEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new 
DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(intSlot, new IntegerLiteral(12))); + assertRewrite(new GreaterThanEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThan(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(intSlot, new IntegerLiteral(13))); + assertRewrite(new LessThanEqual(new Cast(intSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThanEqual(intSlot, new IntegerLiteral(12))); + + // big int + assertRewrite(new EqualTo(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.0"))), + new EqualTo(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new EqualTo(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + ExpressionUtils.falseOrNull(bigIntSlot)); + assertRewrite(new NullSafeEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + BooleanLiteral.FALSE); + assertRewrite(new GreaterThan(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThan(bigIntSlot, new BigIntLiteral(12L))); + assertRewrite(new GreaterThanEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new GreaterThanEqual(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThan(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new LessThan(bigIntSlot, new BigIntLiteral(13L))); + assertRewrite(new LessThanEqual(new Cast(bigIntSlot, DecimalV3Type.createDecimalV3Type(3, 1)), new DecimalV3Literal(new BigDecimal("12.3"))), + new 
LessThanEqual(bigIntSlot, new BigIntLiteral(12L))); } @Test - void testDecimalV3Literal() { + void testDecimalCmpDecimalV3Literal() { executor = new ExpressionRuleExecutor(ImmutableList.of( bottomUp(SimplifyComparisonPredicate.INSTANCE) )); diff --git a/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy b/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy new file mode 100644 index 00000000000000..af975aeeaa22e7 --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/expression/test_simplify_comparison_predicate.groovy @@ -0,0 +1,170 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// TODO: date datetime comparison still has bug, need fix. 
+suite('test_simplify_comparison_predicate', 'nonConcurrent') { + def tbl = 'test_simplify_comparison_predicate_tbl' + def checkExplain = { expression, resExpression -> + def checker = { explainString, exception, startTime, endTime -> + assertNull(exception) + def foundOutputExprs = false + def succ = false + for (def line : explainString.split('\n')) { + if (foundOutputExprs) { + assertTrue(line.contains(resExpression), "'${line}' no contains '${resExpression}'") + succ = true + break + } + if (line.contains('OUTPUT EXPRS:')) { + foundOutputExprs = true + } + } + assertTrue(foundOutputExprs) + assertTrue(succ) + } + + explain { + sql "SELECT ${expression} FROM ${tbl}" + check checker + } + } + def testSimplify = { checkNullColumn, checkNotNullColumn, expression, resExpression -> + def types = [''] + def column = '' + if (expression.contains('{int_like_column}')) { + column = '{int_like_column}' + types = ['tinyint', 'smallint', 'int', 'bigint'] + } else if (expression.contains('{decimal_column}')) { + column = '{decimal_column}' + types = ['decimal_3_0', 'decimal_5_2'] + } else if (expression.contains('{date_column}')) { + column = '{date_column}' + types = ['date', 'datev1'] + } else if (expression.contains('{datetime_column}')) { + column = '{datetime_column}' + types = ['datetime_0', 'datetime_3', 'datetimev1'] + } + for (def type : types) { + if (type == '') { + checkExplain expression, resExpression + } else { + if (checkNullColumn) { + checkExplain expression.replace(column, "c_${type}_null"), resExpression.replace(column, "c_${type}_null") + } + if (checkNotNullColumn) { + checkExplain expression.replace(column, "c_${type}"), resExpression.replace(column, "c_${type}") + } + } + } + } + + setFeConfigTemporary([disable_datev1:false, disable_decimalv2:false]) { + sql """ + DROP TABLE IF EXISTS ${tbl} FORCE; + + CREATE TABLE ${tbl} ( + c_tinyint tinyint not null default 1, + c_tinyint_null tinyint, + c_smallint smallint not null default 1, + c_smallint_null 
smallint, + c_int int not null default 1, + c_int_null int, + c_bigint bigint not null default 1, + c_bigint_null bigint, + c_decimal_3_0 decimal(3, 0) not null default 1, + c_decimal_3_0_null decimal(3, 0), + c_decimal_5_2 decimal(5, 2) not null default 1, + c_decimal_5_2_null decimal(5, 2), + c_date date not null default '2000-01-01', + c_date_null date, + c_datev1 datev1 not null default '2000-01-01', + c_datev1_null datev1 null, + c_datetime_0 datetime(0) not null default '2000-01-01 00:00:00', + c_datetime_0_null datetime(0), + c_datetime_3 datetime(3) not null default '2000-01-01 00:00:00', + c_datetime_3_null datetime(3), + c_datetimev1 datetimev1 not null default '2000-01-01 00:00:00', + c_datetimev1_null datetimev1 + ) + PROPERTIES ('replication_num' = '1'); + + INSERT INTO ${tbl} VALUES(); + """ + + testSimplify true, true, '{int_like_column} = CAST(1.00 as DOUBLE)', '({int_like_column} = 1)' + testSimplify true, false, '{int_like_column} = CAST(1.01 as DOUBLE)', 'AND[{int_like_column} IS NULL,NULL]' + testSimplify false, true, '{int_like_column} = CAST(1.01 as DOUBLE)', 'FALSE' + testSimplify true, true, '{int_like_column} <=> CAST(1.01 as DOUBLE)', 'FALSE' + testSimplify true, true, '{int_like_column} > CAST(1.00 as DOUBLE)', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} < CAST(1.00 as DOUBLE)', '({int_like_column} < 1)' + testSimplify true, true, '{int_like_column} > CAST(1.01 as DOUBLE)', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} >= CAST(1.01 as DOUBLE)', '({int_like_column} >= 2)' + testSimplify true, true, '{int_like_column} <= CAST(1.01 as DOUBLE)', '({int_like_column} <= 1)' + testSimplify true, true, '{int_like_column} < CAST(1.01 as DOUBLE)', '({int_like_column} < 2)' + testSimplify true, true, '{int_like_column} = 1.00', '({int_like_column} = 1)' + testSimplify true, true, '{int_like_column} > 1.00', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} < 1.00', 
'({int_like_column} < 1)' + testSimplify true, false, '{int_like_column} = 1.01', 'AND[{int_like_column} IS NULL,NULL]' + testSimplify false, true, '{int_like_column} = 1.01', 'FALSE' + testSimplify true, true, '{int_like_column} <=> 1.01', 'FALSE' + testSimplify true, true, '{int_like_column} > 1.01', '({int_like_column} > 1)' + testSimplify true, true, '{int_like_column} >= 1.01', '({int_like_column} >= 2)' + testSimplify true, true, '{int_like_column} <= 1.01', '({int_like_column} <= 1)' + testSimplify true, true, '{int_like_column} < 1.01', '({int_like_column} < 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) = CAST(1.00 as DECIMAL(10, 5))', '(c_decimal_3_0_null = 1)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) = CAST(1.1 as DECIMAL(10, 5))', 'AND[c_decimal_3_0_null IS NULL,NULL]' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) > CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null > 1)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) >= CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null >= 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) < CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null < 2)' + testSimplify false, false, 'CAST(c_decimal_3_0_null as DECIMAL(10, 5)) <= CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_3_0_null <= 1)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.0 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.00)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.1 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.10)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.12 as DECIMAL(10, 5))', '(c_decimal_5_2_null = 1.12)' + testSimplify false, false, 'c_decimal_5_2_null = CAST(1.123 as DECIMAL(10, 5))', 'AND[c_decimal_5_2_null IS NULL,NULL]' + testSimplify false, false, 'c_decimal_5_2 = CAST(1.123 as DECIMAL(10, 5))', 'FALSE' + testSimplify false, false, 'c_decimal_5_2_null > CAST(1.123 as 
DECIMAL(10, 5))', 'c_decimal_5_2_null > 1.12' + testSimplify false, false, 'c_decimal_5_2_null >= CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null >= 1.13' + testSimplify false, false, 'c_decimal_5_2_null <= CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null <= 1.12' + testSimplify false, false, 'c_decimal_5_2_null < CAST(1.123 as DECIMAL(10, 5))', 'c_decimal_5_2_null < 1.13' + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) = '2000-01-01'", "(c_datetime_0 = '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) = '2000-01-01 00:00:00.1'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_0_null AS DATETIME(5)) = '2000-01-01 00:00:00.1'", 'AND[c_datetime_0_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_datetime_0_null AS DATETIME(5)) <=> '2000-01-01 00:00:00.1'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) >= '2000-01-01 00:00:00.1'", "(c_datetime_0 >= '2000-01-01 00:00:01')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) > '2000-01-01 00:00:00.1'", "(c_datetime_0 > '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) <= '2000-01-01 00:00:00.1'", "(c_datetime_0 <= '2000-01-01 00:00:00')" + testSimplify false, false, "CAST(c_datetime_0 AS DATETIME(5)) < '2000-01-01 00:00:00.1'", "(c_datetime_0 < '2000-01-01 00:00:01')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) = '2000-01-01'", "(c_datetime_3 = '2000-01-01 00:00:00.000')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) = '2000-01-01 00:00:00.1234'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_3_null AS DATETIME(5)) = '2000-01-01 00:00:00.1234'", 'AND[c_datetime_3_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_datetime_3_null AS DATETIME(5)) <=> '2000-01-01 00:00:00.1234'", 'FALSE' + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) >= '2000-01-01 00:00:00.1234'", "(c_datetime_3 >= '2000-01-01 
00:00:00.124')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) > '2000-01-01 00:00:00.1234'", "(c_datetime_3 > '2000-01-01 00:00:00.123')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) <= '2000-01-01 00:00:00.1234'", "(c_datetime_3 <= '2000-01-01 00:00:00.123')" + testSimplify false, false, "CAST(c_datetime_3 AS DATETIME(5)) < '2000-01-01 00:00:00.1234'", "(c_datetime_3 < '2000-01-01 00:00:00.124')" + testSimplify false, false, "c_date = '2000-01-01 00:00:01'", 'FALSE' + testSimplify false, false, "CAST(c_date_null AS DATETIME(5)) = '2000-01-01 00:00:01'", 'AND[c_date_null IS NULL,NULL]' + testSimplify false, false, "CAST(c_date_null AS DATETIME(5)) <=> '2000-01-01 00:00:01'", 'FALSE' + testSimplify false, false, "CAST(c_date AS DATETIME(5)) > '2000-01-01 00:00:01'", "c_date > '2000-01-01'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) >= '2000-01-01 00:00:01'", "c_date >= '2000-01-02'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) <= '2000-01-01 00:00:01'", "c_date <= '2000-01-01'" + testSimplify false, false, "CAST(c_date AS DATETIME(5)) < '2000-01-01 00:00:01'", "c_date < '2000-01-02'" + + sql "DROP TABLE IF EXISTS ${tbl} FORCE" + } +} From ce9facb998a925fc551ddf8e03a229071ea0aa66 Mon Sep 17 00:00:00 2001 From: xzj7019 Date: Mon, 23 Dec 2024 16:21:12 +0800 Subject: [PATCH 54/82] [Improvement](tools) refine tools schema (#45778) --- .../ddl/create-tpcds-tables-sf1000.sql | 14 +++++++------- .../ddl/create-tpcds-tables-sf10000.sql | 18 +++++++++--------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql b/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql index 7e950580553f68..23b16480be3eb1 100644 --- a/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql +++ b/tools/tpcds-tools/ddl/create-tpcds-tables-sf1000.sql @@ -213,7 +213,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION 
`p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 3 +DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 4 PROPERTIES ( "replication_num" = "1" ); @@ -339,7 +339,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 1 +DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -449,7 +449,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 1 +DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -648,7 +648,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 1 +DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -826,7 +826,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 3 +DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -1006,7 +1006,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 1 +DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 2 PROPERTIES ( "replication_num" = "1" ); @@ -1112,7 +1112,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ss_item_sk, 
ss_ticket_number) BUCKETS 3 +DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 4 PROPERTIES ( "replication_num" = "1" ); diff --git a/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql b/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql index 87201403d39fb1..e6d62165f97004 100644 --- a/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql +++ b/tools/tpcds-tools/ddl/create-tpcds-tables-sf10000.sql @@ -213,7 +213,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "catalog" @@ -340,7 +340,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 256 +DISTRIBUTED BY HASH(inv_item_sk, inv_warehouse_sk) BUCKETS 216 PROPERTIES ( "replication_num" = "1" ); @@ -450,7 +450,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "catalog" @@ -485,7 +485,7 @@ CREATE TABLE IF NOT EXISTS customer_address ( ca_location_type char(20) ) DUPLICATE KEY(ca_address_sk) -DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 256 +DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 216 PROPERTIES ( "replication_num" = "1" ); @@ -650,7 +650,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 216 PROPERTIES ( 
"replication_num" = "1", "colocate_with" = "web" @@ -829,7 +829,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 256 +DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "web" @@ -1010,7 +1010,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 256 +DISTRIBUTED BY HASH(sr_item_sk, sr_ticket_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "store" @@ -1117,7 +1117,7 @@ PARTITION `p70` VALUES LESS THAN ("2452945"), PARTITION `p71` VALUES LESS THAN ("2452975"), PARTITION `p72` VALUES LESS THAN (MAXVALUE) ) -DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 256 +DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 216 PROPERTIES ( "replication_num" = "1", "colocate_with" = "store" @@ -1158,7 +1158,7 @@ CREATE TABLE IF NOT EXISTS customer ( c_last_review_date_sk integer ) DUPLICATE KEY(c_customer_sk) -DISTRIBUTED BY HASH(c_customer_id) BUCKETS 256 +DISTRIBUTED BY HASH(c_customer_id) BUCKETS 216 PROPERTIES ( "replication_num" = "1" ); From 81d9af08457ed2c33bd0d80d9b36ea7b25b0bc8a Mon Sep 17 00:00:00 2001 From: qiye Date: Mon, 23 Dec 2024 16:38:36 +0800 Subject: [PATCH 55/82] [test](index compaction)Fix unstable index compaction fault injection case (#45784) Problem Summary: Related PR:#45127 When set `enable_match_without_inverted_index` to `false`, `enable_common_expr_pushdown` must be `true`, if not, it will throw `[E-6001]match_any not support execute_match` error. 
--- ...x_compaction_exception_fault_injection.groovy | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy b/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy index b54f6374d833b8..9c0cb5aea97f0b 100644 --- a/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy +++ b/regression-test/suites/fault_injection_p0/test_index_compaction_exception_fault_injection.groovy @@ -26,8 +26,6 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { def changed_variables = sql "show variables where Changed = 1" logger.info("changed variables: " + changed_variables.toString()) - // sql "UNSET GLOBAL VARIABLE ALL;" - sql "SET global enable_match_without_inverted_index = false" boolean disableAutoCompaction = false @@ -120,7 +118,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { } def run_sql = { -> - def result = sql_return_maparray "SELECT * FROM ${tableName} WHERE name MATCH 'bason'" + def result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE name MATCH 'bason'" assertEquals(3, result.size()) assertEquals(1, result[0]['id']) assertEquals("bason", result[0]['name']) @@ -129,7 +127,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals(3, result[2]['id']) assertEquals("bason", result[2]['name']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE age = 11" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE age = 11" assertEquals(3, result.size()) assertEquals(1, result[0]['id']) assertEquals("bason", result[0]['name']) @@ -138,7 +136,7 @@ 
suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals(3, result[2]['id']) assertEquals("bason", result[2]['name']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE description MATCH 'singing'" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE description MATCH 'singing'" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals("bason is good at singing", result[0]['description']) @@ -147,7 +145,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals("bason is good at singing", result[2]['description']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(scores, 79)" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(scores, 79)" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals("[79, 85, 97]", result[0]['scores']) @@ -156,7 +154,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals("[79, 85, 97]", result[2]['scores']) - result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(hobbies, 'dancing')" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(hobbies, 'dancing')" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals('["singing", "dancing"]', result[0]['hobbies']) @@ -165,7 +163,7 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { assertEquals("bason", result[2]['name']) assertEquals('["singing", "dancing"]', result[2]['hobbies']) - 
result = sql_return_maparray "SELECT * FROM ${tableName} WHERE array_contains(evaluation, 'bason is very clever')" + result = sql_return_maparray "SELECT /*+ SET_VAR(enable_match_without_inverted_index = false, enable_common_expr_pushdown = true) */ * FROM ${tableName} WHERE array_contains(evaluation, 'bason is very clever')" assertEquals(3, result.size()) assertEquals("bason", result[0]['name']) assertEquals('["bason is very clever", "bason is very healthy"]', result[0]['evaluation']) @@ -338,7 +336,5 @@ suite("test_index_compaction_exception_fault_injection", "nonConcurrent") { if (has_update_be_config) { set_be_config.call("inverted_index_compaction_enable", invertedIndexCompactionEnable.toString()) } - sql "SET global enable_match_without_inverted_index = true" } - } From f662dd0f7f7f81ca7e78c2fffc15f4c9d8060670 Mon Sep 17 00:00:00 2001 From: airborne12 Date: Mon, 23 Dec 2024 16:44:14 +0800 Subject: [PATCH 56/82] [feat](test) add tokenize ut test (#45374) Add unitest for token extractor for ngram bf index. 
--- be/test/olap/itoken_extractor_test.cpp | 493 +++++++++++++++++++++++++ 1 file changed, 493 insertions(+) diff --git a/be/test/olap/itoken_extractor_test.cpp b/be/test/olap/itoken_extractor_test.cpp index ea35f81973c73c..3904dbee5e766f 100644 --- a/be/test/olap/itoken_extractor_test.cpp +++ b/be/test/olap/itoken_extractor_test.cpp @@ -92,4 +92,497 @@ TEST_F(TestITokenExtractor, ngram_like_extractor) { runNextInStringLike(ngram_extractor, {from_u8string(u8"\\_手机%")}, {from_u8string(u8"_手"), from_u8string(u8"手机")}); } + +TEST_F(TestITokenExtractor, ngram_extractor_empty_input) { + // Test empty string input, expect no output + std::string statement = ""; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_single_char) { + // Only one character, less than n=2, should produce no tokens + std::string statement = "a"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_ascii_characters) { + // Test token extraction for pure ASCII characters + std::string statement = "abcd"; + // 2-gram tokens: "ab", "bc", "cd" + std::vector expect = {"ab", "bc", "cd"}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_emoji) { + // Test scenarios that include Emoji and other multi-byte UTF-8 characters + // Assume n=2. Here "👍" is an emoji (4 bytes), "测" is a Chinese character (3 bytes). 
+ // String: "👍测A" (3 elements: 1 Emoji, 1 Chinese char, 1 ASCII) + // For two code points per token: + // First token: "👍测" + // Second token: "测A" + std::string statement = from_u8string(u8"👍测A"); + std::vector expect = {from_u8string(u8"👍测"), from_u8string(u8"测A")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_n_greater_than_length) { + // When n=3 and the string length is only 2, no 3-character Ngram can be formed + std::string statement = "ab"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(3); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_chinese_only) { + // Test pure Chinese characters with multi-byte UTF-8 tokens + // String: "中国人" (3 Chinese chars, each 3 bytes) + // n=2, expected tokens: ["中国", "国人"] + std::string statement = from_u8string(u8"中国人"); + std::vector expect = {from_u8string(u8"中国"), from_u8string(u8"国人")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_mixed_width_characters) { + // Mixed character widths: English (1 byte), Chinese (3 bytes), Emoji (4 bytes) + // String: "A中👍B" + // Code points: 'A'(1), '中'(1), '👍'(1), 'B'(1) total 4 code points + // n=2 tokens: "A中", "中👍", "👍B" + std::string statement = from_u8string(u8"A中👍B"); + std::vector expect = {from_u8string(u8"A中"), from_u8string(u8"中👍"), + from_u8string(u8"👍B")}; + NgramTokenExtractor ngram_extractor(2); + runNextInString(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_empty_input) { + // Test empty input for like extraction + std::string statement = ""; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_no_pattern) { + // No % or _, 
equivalent to extracting n-length sequences. + // String: "abc", n=2, theoretically extract "ab", "bc" + // next_in_string_like requires n code points to return a token. + // Without % or _, it should still extract normally. + std::string statement = "abc"; + // n=2: extract "ab", then "bc" + std::vector expect = {"ab", "bc"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_pattern1) { + // No % or _, equivalent to extracting n-length sequences. + // String: "abc", n=2, theoretically extract "ab", "bc" + // next_in_string_like requires n code points to return a token. + // Without % or _, it should still extract normally. + std::string statement = "%abc%def%gh%"; + // n=2: extract "ab", then "bc" + std::vector expect = {"ab", "bc", "de", "ef", "gh"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_patterns_only) { + // String has only '%' and '_', no normal chars to form a 2-gram + // "%__%", n=2: % and _ are not considered normal token characters + // Each encounter of % resets the token, so no tokens are generated + std::string statement = "%__%"; + std::vector expect = {}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_escaped_characters) { + // Test scenarios with escape characters: "\\%abc% \\_xyz_" + // Escaped '%' should be treated as a normal character, similarly for '_' + // Suppose n=2, for "\\%abc%": + // Initially encounter '\\%' => escaped '%', include it in token: "%a" + // Then 'a'(1 byte) 'b'(1 byte) form "ab", 'c'(1 byte) continues... + // A bit complex example, mainly to demonstrate properly handling escaped chars. 
+ std::string statement = from_u8string(u8"\\%手机% \\_人_"); + // Analysis: + // "\\%" -> escaped '%', token gets "%" + // then "手"(1 code point), "机"(1 code point). Once 2 code points are formed, we have "%手" + // Move pos. Next token starts from "机": + // '机'(1 code point) + // Next is '%', encountering '%', reset token, skip over ' '... + // Next segment: "\\_人_" + // "\\_" => escaped '_', token gets "_" + // '人'(1 code point) + '_' pattern encountered resets token after outputting "_人" + // Final result: {"%手", "_人"} + // Note: Based on logic, pattern chars % and _ reset the token. After a token is output, + // encountering % or _ resets the token to empty, not affecting previously output tokens. + std::vector expect = {"%手", "手机", " _", "_人"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_like_extractor_complex_pattern) { + // Complex scenario: "abc%中_\\%国%d" + // n=2 analysis: + // Start from the beginning: 'a'(1 code point), 'b'(1 code point) => "ab" output + // Encounter 'c' then '%', at '%' reset token and move forward + // Next: "中"(1 code point), '_' is pattern reset + // Then "\\%" => '%'(1 code point), '国'(1 code point) => "%国" output + // Encounter '%', reset token + // Finally 'd' alone is not enough to form 2 code points, no output + std::string statement = from_u8string(u8"abc%中_\\%国%d"); + std::vector expect = {"ab", "bc", "%国"}; + NgramTokenExtractor ngram_extractor(2); + runNextInStringLike(ngram_extractor, statement, expect); +} + +TEST_F(TestITokenExtractor, ngram_extractor_different_n) { + // Test different n values + // String: "abcd" + // n=3: extract "abc", "bcd" + std::string statement = "abcd"; + std::vector expect = {"abc", "bcd"}; + NgramTokenExtractor ngram_extractor(3); + runNextInString(ngram_extractor, statement, expect); +} + +std::string get_repetition_info(const std::string& text, size_t n) { + NgramTokenExtractor ngram_extractor(n); + 
std::vector tokens; + + { + size_t pos = 0; + size_t token_start = 0; + size_t token_length = 0; + while (ngram_extractor.next_in_string(text.c_str(), text.size(), &pos, &token_start, + &token_length)) { + tokens.push_back(text.substr(token_start, token_length)); + } + } + + std::unordered_map token_count; + for (auto& t : tokens) { + token_count[t]++; + } + + int total_tokens = static_cast(tokens.size()); + int repeated_tokens = 0; + for (auto& kv : token_count) { + if (kv.second > 1) { + repeated_tokens += kv.second; + } + } + + double repetition_rate = 0.0; + if (total_tokens > 0) { + repetition_rate = static_cast(repeated_tokens) / total_tokens; + } + + std::ostringstream oss; + oss << "Total tokens: " << total_tokens << "\n" + << "Repeated tokens: " << repeated_tokens << "\n" + << "Repetition rate: " << repetition_rate << "\n"; + + return oss.str(); +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_matchine_text) { + std::string statement = + "Exception=System.CannotUnloadAppDomain;\n" + "HResult=0x00007486;\n" + "Message=exception happened;\n" + "Source=BenchmarkLogGenerator;\n" + "StackTrace:\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at 
BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at 
System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at 
BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85\n" + " at BenchmarkLogGenerator.Flows.BootFlow.d__1.MoveNext() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 47\n" + " at BenchmarkLogGenerator.Scheduler.Flow.NextStep() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 74\n" + " at BenchmarkLogGenerator.Scheduler.Step.EnqueueNextStep(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 112\n" + " at BenchmarkLogGenerator.Scheduler.FlowDelayStep.Execute(Scheduler scheduler) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 137\n" + " at BenchmarkLogGenerator.Scheduler.Run() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Scheduler.cs:line 28\n" + " at BenchmarkLogGenerator.Generator.Run(Int32 sizeFactor) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 84\n" + " at BenchmarkLogGenerator.Generator.<>c__DisplayClass26_0.b__0() in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Generator.cs:line 74\n" + " at System.Threading.ThreadHelper.ThreadStart_Context(Object state)\n" + " at System.Threading.ExecutionContext.RunInternal(ExecutionContext executionContext)\n" + " at 
BenchmarkLogGenerator.Flows.BootFlow.GetLevel(Int64 v) in " + "C:\\Src\\Tools\\BenchmarkLogGenerator\\Flows\\BootFlow.cs:line 85"; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_short_text) { + std::string statement = + "I bought these leggings for my daughter @ Christmas along with several other " + "leggings. She liked these leggings the best since they were lined and are very warm. " + " She is 5'3" and 115 lbs. and they fit her very well/comfortable. The only thing " + "I disliked about them is that the pattern is not uniform on both legs as it gets to " + "your upper thigh area."; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} + +TEST_F(TestITokenExtractor, ngram_extractor_repetition_rate_medium_text) { + std::string statement = + "Loving the fabulous and exquisite women's wear for plus size women, because of how " + "this sweater makes you feel good about yourself, and speaks to her heart with a " + "positive perspective, given her overall character as well." + "I bought these leggings for my daughter @ Christmas along with several other " + "leggings. She liked these leggings the best since they were lined and are very warm. " + " She is 5'3" and 115 lbs. and they fit her very well/comfortable. The only thing " + "I disliked about them is that the pattern is not uniform on both legs as it gets to " + "your upper thigh area." + "Love my boot cuffs I got as a gift. This is one I won’t be re-gifting. People at work " + "love it, good quality and good value. Love that it’s reversible and I can wear it " + "with any size boots." + "Reminds me of being 13 in the early 80's, only these are more attractive. These leg " + "warmers are exactly as pictured, soft & warm over my jeans to keep out the chill on " + "this snowy day. 
Brand new in package & I am very happy with this purchase. I will " + "buy another pair to double up the warmth on my bare legs." + "I couldn't be happier with this dress. It is the epitome of classic WW2 era ladies " + "fashion.
The material is lightweight, yet very soft and silky. It has a full " + "lining to it. I would
recommend sizing up on this particular
style as it " + "has a way of hugging your
curves, and in the midsection .

If you have " + "a perfectly flat stomach, then No worries.
But ladies who have a wee bit of a " + "pouch inFront, this dress may hug you a tad in the tummy.
It hangs very nicely " + "in back, and flows
beautifully. Honestly , i would order one in
every " + "color of the rainbow if they sold
them !
I love it, Thank You!
This is " + "my 4th dress from this vendor, and
by far my favorite." + "This tie is super cute! I love the color and the design... but that's about it.

The day after receiving it in the mail I strapped it on and wore it to work. " + "Within the first few hours I noticed the little white Vs began to fray and frizz. By " + "the end if the day most of white threading had completely frayed out. This tie was " + "very, very cheaply made.

It's a shame, because it is... or was... a very " + "good-looking bow tie!" + "The color and pictures looks very good. It fits really nicely with a bit of stretch " + "in the material. I was afraid after washing it that the colors would fade but it did " + "not. I highly recommand it t!!!" + "I just purchased this coat, and I have to say that so far, I am very satisfied with " + "it. The belt is a nice added touch, but not necessary to wear. This coat keeps me " + "very warm, and with the winter we're having this year, it's been a life saver. I " + "have gotten compliments on how it looks as well. This is replacing another coat that " + "had a zipper that broke after two winters of wearing it, so I am being extra careful " + "when zippering up this one. It's too soon to say how sturdy the zipper is on this " + "one, but as far as everything else, it's serving its purpose well. I highly " + "recommend it for the quality and price." + "ABSOLUTELY JUNK! wore it about four times then the hood nearly ripped completely off! " + "The Seam came out completely! DO NOT BUY WOULD LOVE TO HAVE MY MONEY COMPLETELY " + "REFUNDED!" + "this was the worst thing I brought online
it was very cheaply made size not " + "true brought
as a gift was so embarrassing the person did not accept the gift
the fur inside looked real fake I am stuck with this one" + "Honestly the most comfortable jacket I've ever worn. Will probably buy this jacket " + "for the rest of my life. End of story" + "ok Im trying to figure out if this is women or unisex sizing..This has a man wearing " + "it but it clearly is for a girl. I need to know before I order." + "Very comfortable and cute! It works well in school uniform and everyday wear. The " + "light material and zippers on the shoulders are super unique and welcomed addition to " + "my otherwise drab uniform!" + "The color is active. THe style is ok.
One thing to remember is to order one size " + "bigger than your regular size. For example, I wear S and the size M is OK ON me" + "These are actually considered panty hose. Unless you are using under a dress or a " + "very long sweater dont buy. Leggins is not the right description!!!''" + "Nice Dress" + "I am overall happy with the leggings. But be aware that if you are larger then a size " + "8, these will be too small for you. I am a size 8 and they just fit. The pattern is " + "stretched out quite a bit, but I think it still looks pretty good even tho the " + "pattern stretch out is not quite as bright and crisp. No complaints about the length " + "for me. I am 5'7" and these leggings reach my ankles without the feeling that " + "they are going to pull off of my hips." + "I bought these jeans knowing they were marked 'irregular' and thought there would be " + "a noticeable flaw. But when I received these jeans I was pleasantly surprised. They " + "look great and I couldn't find a flaw. The only thing I noticed was that the jeans " + "fit a bit tight around my butt. This is my first pair of big star jeans so it could " + "just be how they fit but I'm not sure. Other than that, these jeans are great for the " + "price." + "great scarf for price, ships quickly, color is more turquoise, than baby blue. really " + "like the chevron design lots of compliments." + "The fit of these leggings is excellent, they are extremely comfortable and true to " + "size. Not a skinny girl's legging, there's room to breathe. The classy, paisley " + "pattern makes regular black leggings seem boring. Good material and the design is " + "done nicely. An excellent buy, thanks Amazon." + "The dress is gorgeous and the mesh hearts are awesome. the material was a little " + "surprising, but its really cool" + "It did take long to get though well worth the wait... This was a gift for my daughter " + "and she loved it!! No issues with the product !" + "I love this sweater. 
I bought it for my daughter and she loves it. The colors are " + "very bright and I will surely be purchasing more from this seller." + "I bought this sweater in this color and in black in medium. I wear a medium. I " + "tried on the black first and the entire sweater fell apart as I was putting it on! " + "It literally came apart at the seams!" + "This wallet is nice looking and has the strongest chain I have ever seen. However, " + "it
simply has too few wallets for credit cards, so I sent it back. Others, " + "however may like
it, so check it out anyway." + "My husband loves his new scarf, as it is so extremely soft and warm. He was even " + "willing to give up his favorite scarf, which he has worn for years, for this one. It " + "adds just the right amount of color at the neckline of his black wool overcoat to " + "wear to the office." + "This dress appears to be quite beautiful in picture but is not. The materials was not " + "very nice, looked a bit cheap. as well the overall fit was not very nice. Had the " + "materials been of slightly better quality, it would have made up for some minor " + "imperfections. The dress runs very very small. I am an xs/s typically and thought " + "this was just too too tight and uncomfortable." + "Very nice scarves. Only complaint would be the description says one is purple but it " + "is actually a burgandy color." + "I ordered a large which is my usual size and found the arms to really tight even " + "without a winter sweater.
Poor quality - strings and "pulls" everywhere" + "Thank you so much for my my beautiful dress. The fit was perfect. The detail of the " + "dress was exactly like the picture. Also the dress was delivered before time. Thanks " + "again and I will be making future purchases very soon.5 stars for sure." + "this is a great looking shirt but i wish they had it in a medium i would definatley " + "spend my money if it was smaller" + "Purchased this for my granddaughter, and she simply loves it! People tell her, she " + "looks like a "Pop Star" because of the design and even mention she looks like " + "Michael Jackson! All she needs is to learn how to sing and dance!" + "At first I was worried that they would not stay up, but that was not a problem. I " + "wish they were available in a calf length for boots" + "I purchased this hat, more for a joke then keeping warm. The hat and beard are well " + "made. Looks cool. I don't think the beard would really do much to keep your face " + "warm. My buddies all got a laugh when I showed up wearing it." + "The actual shorts and ordering process was great but listed measurements diddnt match " + "up. I ordered the nxt size up and still too small." + "If you are looking for stretchy these aren't it so make sure to order right size. " + "Because of the fleece material inside they slide down constantly. Not too happy. But " + "they are pretty." + "So I have a 45+ inch chest and a 31 inch waist. Some would say that I'm athletically " + "proportioned. I will never find anything that fits me the way that it's supposed to " + "fit but this hoodie came damn near close. The US XL is nearly perfect for me. It " + "tappers around the waist as advertise even for broader guy like myself. My only quirk " + "is the collar around the hood gives a "no neck" appearance. But it's growing " + "on me. So as I said "nearly perfect"." + "This hat was purchased for my nephew for Christmas. It barely made it through " + "Christmas Eve. 
The fabric is extremely flimsy and there was a giant hole in it after " + "one or two times he put it on. I was able to get Amazon to refund my money, but not " + "worth the purchase. Very flimsy material." + "Got these for my mom and she wears them all the time. cute and comfy. I will borrow " + "them from her soon." + "first, the color is not like the picture above, the material of the shirt looks so " + "cheap and uncomfortable, the lace also looks so cheap.
second, at least use a " + "better material, the product really don't looks like the picture and not worthy at all" + "I purchased for my daughter and she loves it! This is a very high quality product " + "and worth the cost. I certainly would not pay $500 as the suggested price but " + "certainly worth the $160 paid. It did take nearly one month to arrive." + "The elastic material is comfortable, fits great on me . The straps are detachable so " + "you can have it cross your back or go bare." + "This blazer was poorly sewn together. The metal closure fell off when trying it on " + "for the first time. The material was uneven in length. This was a disappointing " + "purchase." + "I'm wearing this with my steelers t-shirt when I go to Vegas in a couple of weeks to " + "represent my team even though we not in the super bowl" + "I ordered a 3X. Normally a 2X will fit me in most clothing, but I order 3X when " + "available. This was tight and very,very thin. I returned it." + "This hood is super adorable and I love the pink/gray combination. There are just 2 " + "small things that I wasn't thrilled about. 1) The hood itself is just a tad small. 2) " + "The back part is cut kinda short leaving my neck a tinse exposed but I just pushed " + "the hood further back on my head and got a bit more coverage out of it. But I can " + "live with those things because it is super cute!" + "Love the color, cut and style of these gloves. They keep my hands warm without " + "restricting the use of my fingers for keying, sorting , etc. I think they are the " + "smartest buy I've made all winter!" + "so sucks the quality
the color is not like the picture above and the fur makes " + "it looks so cheap" + "And they look great on me! LOL They are simple with a classic look to them. I'll " + "probably pair with similar color shoes." + "The size was at least two sizes smaller than the printed size. They do not shape " + "well. I was very disappointed."; + size_t n = 5; + std::string info = get_repetition_info(statement, n); + + std::cout << info << std::endl; +} } // namespace doris From 6403b3c04bb71c0081e401b46cc70db1d05a2636 Mon Sep 17 00:00:00 2001 From: James Date: Mon, 23 Dec 2024 17:07:04 +0800 Subject: [PATCH 57/82] [fix](auditlog)Fix auditlog NPE (#45772) ### What problem does this PR solve? Don't call MetricRepo if it is not initialized to avoid NPE. Issue Number: close #xxx Related PR: #xxx Problem Summary: ### Release note None --- .../org/apache/doris/qe/AuditLogHelper.java | 62 ++++++++++--------- .../ColumnStatisticsCacheLoader.java | 2 +- 2 files changed, 33 insertions(+), 31 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java index f29c617c15861e..cd1be6c5cb6df7 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java @@ -216,40 +216,42 @@ private static void logAuditLogImpl(ConnectContext ctx, String origStmt, Stateme .setCommandType(ctx.getCommand().toString()); if (ctx.getState().isQuery()) { - if (!ctx.getSessionVariable().internalSession) { - MetricRepo.COUNTER_QUERY_ALL.increase(1L); - MetricRepo.USER_COUNTER_QUERY_ALL.getOrAdd(ctx.getQualifiedUser()).increase(1L); - } - try { - if (Config.isCloudMode()) { - cloudCluster = ctx.getCloudCluster(false); - } - } catch (ComputeGroupException e) { - LOG.warn("Failed to get cloud cluster", e); - return; - } - MetricRepo.increaseClusterQueryAll(cloudCluster); - if (ctx.getState().getStateType() == MysqlStateType.ERR - && 
ctx.getState().getErrType() != QueryState.ErrType.ANALYSIS_ERR) { - // err query + if (MetricRepo.isInit) { if (!ctx.getSessionVariable().internalSession) { - MetricRepo.COUNTER_QUERY_ERR.increase(1L); - MetricRepo.USER_COUNTER_QUERY_ERR.getOrAdd(ctx.getQualifiedUser()).increase(1L); - MetricRepo.increaseClusterQueryErr(cloudCluster); + MetricRepo.COUNTER_QUERY_ALL.increase(1L); + MetricRepo.USER_COUNTER_QUERY_ALL.getOrAdd(ctx.getQualifiedUser()).increase(1L); } - } else if (ctx.getState().getStateType() == MysqlStateType.OK - || ctx.getState().getStateType() == MysqlStateType.EOF) { - // ok query - if (!ctx.getSessionVariable().internalSession) { - MetricRepo.HISTO_QUERY_LATENCY.update(elapseMs); - MetricRepo.USER_HISTO_QUERY_LATENCY.getOrAdd(ctx.getQualifiedUser()).update(elapseMs); - MetricRepo.updateClusterQueryLatency(cloudCluster, elapseMs); + try { + if (Config.isCloudMode()) { + cloudCluster = ctx.getCloudCluster(false); + } + } catch (ComputeGroupException e) { + LOG.warn("Failed to get cloud cluster", e); + return; } + MetricRepo.increaseClusterQueryAll(cloudCluster); + if (ctx.getState().getStateType() == MysqlStateType.ERR + && ctx.getState().getErrType() != QueryState.ErrType.ANALYSIS_ERR) { + // err query + if (!ctx.getSessionVariable().internalSession) { + MetricRepo.COUNTER_QUERY_ERR.increase(1L); + MetricRepo.USER_COUNTER_QUERY_ERR.getOrAdd(ctx.getQualifiedUser()).increase(1L); + MetricRepo.increaseClusterQueryErr(cloudCluster); + } + } else if (ctx.getState().getStateType() == MysqlStateType.OK + || ctx.getState().getStateType() == MysqlStateType.EOF) { + // ok query + if (!ctx.getSessionVariable().internalSession) { + MetricRepo.HISTO_QUERY_LATENCY.update(elapseMs); + MetricRepo.USER_HISTO_QUERY_LATENCY.getOrAdd(ctx.getQualifiedUser()).update(elapseMs); + MetricRepo.updateClusterQueryLatency(cloudCluster, elapseMs); + } - if (elapseMs > Config.qe_slow_log_ms) { - String sqlDigest = DigestUtils.md5Hex(((Queriable) parsedStmt).toDigest()); - 
auditEventBuilder.setSqlDigest(sqlDigest); - MetricRepo.COUNTER_QUERY_SLOW.increase(1L); + if (elapseMs > Config.qe_slow_log_ms) { + String sqlDigest = DigestUtils.md5Hex(((Queriable) parsedStmt).toDigest()); + auditEventBuilder.setSqlDigest(sqlDigest); + MetricRepo.COUNTER_QUERY_SLOW.increase(1L); + } } } auditEventBuilder.setIsQuery(true) diff --git a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java index 692d723ed0a9f5..8e29fe25d72574 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java +++ b/fe/fe-core/src/main/java/org/apache/doris/statistics/ColumnStatisticsCacheLoader.java @@ -42,7 +42,7 @@ protected Optional doLoad(StatisticsCacheKey key) { columnStatistic = table.getColumnStatistic(key.colName); } } catch (Throwable t) { - LOG.warn("Failed to load stats for column [Catalog:{}, DB:{}, Table:{}, Column:{}], Reason: {}", + LOG.info("Failed to load stats for column [Catalog:{}, DB:{}, Table:{}, Column:{}], Reason: {}", key.catalogId, key.dbId, key.tableId, key.colName, t.getMessage()); if (LOG.isDebugEnabled()) { LOG.debug(t); From 2a3a4392496ef5c0811f346560f4dcfe44221d07 Mon Sep 17 00:00:00 2001 From: zhiqiang Date: Mon, 23 Dec 2024 17:31:10 +0800 Subject: [PATCH 58/82] [refactor](metrics) Remove IntAtomicCounter & CoreLocal (#45742) ### What problem does this PR solve? 1. Remove `IntAtomicCounter`, it is equal to `IntCounter`. 2. Remove `CoreLocal` related code. It is not used any more. 
--- be/src/olap/lru_cache.cpp | 12 +- be/src/olap/lru_cache.h | 8 +- .../workload_group/workload_group_metrics.cpp | 17 +- .../workload_group/workload_group_metrics.h | 12 +- be/src/util/core_local.cpp | 129 -------------- be/src/util/core_local.h | 162 ------------------ be/src/util/doris_metrics.cpp | 22 +-- be/src/util/doris_metrics.h | 22 +-- be/src/util/metrics.h | 128 ++------------ be/src/util/system_metrics.cpp | 138 +++++++-------- be/test/util/core_local_test.cpp | 122 ------------- be/test/util/doris_metrics_test.cpp | 10 +- be/test/util/metrics_test.cpp | 8 +- 13 files changed, 141 insertions(+), 649 deletions(-) delete mode 100644 be/src/util/core_local.cpp delete mode 100644 be/src/util/core_local.h delete mode 100644 be/test/util/core_local_test.cpp diff --git a/be/src/olap/lru_cache.cpp b/be/src/olap/lru_cache.cpp index e539f4a440ab0c..9895a0138947be 100644 --- a/be/src/olap/lru_cache.cpp +++ b/be/src/olap/lru_cache.cpp @@ -604,12 +604,12 @@ ShardedLRUCache::ShardedLRUCache(const std::string& name, size_t capacity, LRUCa INT_GAUGE_METRIC_REGISTER(_entity, cache_capacity); INT_GAUGE_METRIC_REGISTER(_entity, cache_usage); INT_GAUGE_METRIC_REGISTER(_entity, cache_element_count); - INT_DOUBLE_METRIC_REGISTER(_entity, cache_usage_ratio); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_lookup_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_hit_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_stampede_count); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_entity, cache_miss_count); - INT_DOUBLE_METRIC_REGISTER(_entity, cache_hit_ratio); + DOUBLE_GAUGE_METRIC_REGISTER(_entity, cache_usage_ratio); + INT_COUNTER_METRIC_REGISTER(_entity, cache_lookup_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_hit_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_stampede_count); + INT_COUNTER_METRIC_REGISTER(_entity, cache_miss_count); + DOUBLE_GAUGE_METRIC_REGISTER(_entity, cache_hit_ratio); _hit_count_bvar.reset(new bvar::Adder("doris_cache", 
_name)); _hit_count_per_second.reset(new bvar::PerSecond>( diff --git a/be/src/olap/lru_cache.h b/be/src/olap/lru_cache.h index 303a4cf2065ef9..4a4b6ddd0054f3 100644 --- a/be/src/olap/lru_cache.h +++ b/be/src/olap/lru_cache.h @@ -447,10 +447,10 @@ class ShardedLRUCache : public Cache { IntGauge* cache_usage = nullptr; IntGauge* cache_element_count = nullptr; DoubleGauge* cache_usage_ratio = nullptr; - IntAtomicCounter* cache_lookup_count = nullptr; - IntAtomicCounter* cache_hit_count = nullptr; - IntAtomicCounter* cache_miss_count = nullptr; - IntAtomicCounter* cache_stampede_count = nullptr; + IntCounter* cache_lookup_count = nullptr; + IntCounter* cache_hit_count = nullptr; + IntCounter* cache_miss_count = nullptr; + IntCounter* cache_stampede_count = nullptr; DoubleGauge* cache_hit_ratio = nullptr; // bvars std::unique_ptr> _hit_count_bvar; diff --git a/be/src/runtime/workload_group/workload_group_metrics.cpp b/be/src/runtime/workload_group/workload_group_metrics.cpp index 18ff7aa2f4f185..0f7322b7feb448 100644 --- a/be/src/runtime/workload_group/workload_group_metrics.cpp +++ b/be/src/runtime/workload_group/workload_group_metrics.cpp @@ -36,32 +36,31 @@ WorkloadGroupMetrics::WorkloadGroupMetrics(WorkloadGroup* wg) { _cpu_time_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::SECONDS, "workload_group_cpu_time_sec"); - _cpu_time_counter = - (IntAtomicCounter*)(_entity->register_metric(_cpu_time_metric.get())); + _cpu_time_counter = (IntCounter*)(_entity->register_metric(_cpu_time_metric.get())); _mem_used_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_mem_used_bytes"); - _mem_used_bytes_counter = (IntAtomicCounter*)(_entity->register_metric( - _mem_used_bytes_metric.get())); + _mem_used_bytes_counter = + (IntCounter*)(_entity->register_metric(_mem_used_bytes_metric.get())); _local_scan_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, 
"workload_group_local_scan_bytes"); - _local_scan_bytes_counter = (IntAtomicCounter*)(_entity->register_metric( - _local_scan_bytes_metric.get())); + _local_scan_bytes_counter = + (IntCounter*)(_entity->register_metric(_local_scan_bytes_metric.get())); _remote_scan_bytes_metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_remote_scan_bytes"); - _remote_scan_bytes_counter = (IntAtomicCounter*)(_entity->register_metric( - _remote_scan_bytes_metric.get())); + _remote_scan_bytes_counter = + (IntCounter*)(_entity->register_metric(_remote_scan_bytes_metric.get())); for (const auto& [key, io_throttle] : wg->_scan_io_throttle_map) { std::unique_ptr metric = std::make_unique( doris::MetricType::COUNTER, doris::MetricUnit::BYTES, "workload_group_local_scan_bytes_" + io_throttle->metric_name()); _local_scan_bytes_counter_map[key] = - (IntAtomicCounter*)(_entity->register_metric(metric.get())); + (IntCounter*)(_entity->register_metric(metric.get())); _local_scan_bytes_metric_map[key] = std::move(metric); } } diff --git a/be/src/runtime/workload_group/workload_group_metrics.h b/be/src/runtime/workload_group/workload_group_metrics.h index e68715df249dee..c761638d115439 100644 --- a/be/src/runtime/workload_group/workload_group_metrics.h +++ b/be/src/runtime/workload_group/workload_group_metrics.h @@ -28,7 +28,7 @@ class WorkloadGroup; template class AtomicCounter; -using IntAtomicCounter = AtomicCounter; +using IntCounter = AtomicCounter; class MetricEntity; struct MetricPrototype; @@ -65,11 +65,11 @@ class WorkloadGroupMetrics { // _local_disk_io_metric is every disk's IO std::map> _local_scan_bytes_metric_map; - IntAtomicCounter* _cpu_time_counter {nullptr}; // used for metric - IntAtomicCounter* _mem_used_bytes_counter {nullptr}; // used for metric - IntAtomicCounter* _local_scan_bytes_counter {nullptr}; // used for metric - IntAtomicCounter* _remote_scan_bytes_counter {nullptr}; // used for metric - std::map 
_local_scan_bytes_counter_map; // used for metric + IntCounter* _cpu_time_counter {nullptr}; // used for metric + IntCounter* _mem_used_bytes_counter {nullptr}; // used for metric + IntCounter* _local_scan_bytes_counter {nullptr}; // used for metric + IntCounter* _remote_scan_bytes_counter {nullptr}; // used for metric + std::map _local_scan_bytes_counter_map; // used for metric std::atomic _cpu_time_nanos {0}; std::atomic _last_cpu_time_nanos {0}; diff --git a/be/src/util/core_local.cpp b/be/src/util/core_local.cpp deleted file mode 100644 index 1c4b1dd04715b4..00000000000000 --- a/be/src/util/core_local.cpp +++ /dev/null @@ -1,129 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -#include "util/core_local.h" - -#include -#include -#include -#include - -#include "common/compiler_util.h" // IWYU pragma: keep -#include "common/logging.h" -#include "util/spinlock.h" -#include "util/sse_util.hpp" - -namespace doris { - -constexpr int BLOCK_SIZE = 4096; -struct alignas(CACHE_LINE_SIZE) CoreDataBlock { - void* at(size_t offset) { return data + offset; } - char data[BLOCK_SIZE]; - - static void* operator new(size_t nbytes) { - void* p = nullptr; - if (posix_memalign(&p, alignof(CoreDataBlock), nbytes) == 0) { - return p; - } - throw std::bad_alloc(); - } - - static void operator delete(void* p) { free(p); } -}; - -template -class CoreDataAllocatorImpl : public CoreDataAllocator { -public: - virtual ~CoreDataAllocatorImpl(); - void* get_or_create(size_t id) override { - size_t block_id = id / ELEMENTS_PER_BLOCK; - { - std::lock_guard l(_lock); - if (block_id >= _blocks.size()) { - _blocks.resize(block_id + 1); - } - } - CoreDataBlock* block = _blocks[block_id]; - if (block == nullptr) { - std::lock_guard l(_lock); - block = _blocks[block_id]; - if (block == nullptr) { - block = new CoreDataBlock(); - _blocks[block_id] = block; - } - } - size_t offset = (id % ELEMENTS_PER_BLOCK) * ELEMENT_BYTES; - return block->at(offset); - } - -private: - static constexpr int ELEMENTS_PER_BLOCK = BLOCK_SIZE / ELEMENT_BYTES; - SpinLock _lock; // lock to protect the modification of _blocks - std::vector _blocks; -}; - -template -CoreDataAllocatorImpl::~CoreDataAllocatorImpl() { - for (auto block : _blocks) { - delete block; - } -} - -CoreDataAllocatorFactory* CoreDataAllocatorFactory::instance() { - static CoreDataAllocatorFactory _s_instance; - return &_s_instance; -} - -CoreDataAllocator* CoreDataAllocatorFactory::get_allocator(size_t cpu_idx, size_t data_bytes) { - std::lock_guard l(_lock); - auto pair = std::make_pair(cpu_idx, data_bytes); - auto it = _allocators.find(pair); - if (it != std::end(_allocators)) { - return it->second; - } - CoreDataAllocator* 
allocator = nullptr; - switch (data_bytes) { - case 1: - allocator = new CoreDataAllocatorImpl<1>(); - break; - case 2: - allocator = new CoreDataAllocatorImpl<2>(); - break; - case 3: - case 4: - allocator = new CoreDataAllocatorImpl<4>(); - break; - case 5: - case 6: - case 7: - case 8: - allocator = new CoreDataAllocatorImpl<8>(); - break; - default: - DCHECK(false) << "don't support core local value for this size, size=" << data_bytes; - } - _allocators.emplace(pair, allocator); - return allocator; -} - -CoreDataAllocatorFactory::~CoreDataAllocatorFactory() { - for (auto& it : _allocators) { - delete it.second; - } -} - -} // namespace doris diff --git a/be/src/util/core_local.h b/be/src/util/core_local.h deleted file mode 100644 index 1610ae5a0bb046..00000000000000 --- a/be/src/util/core_local.h +++ /dev/null @@ -1,162 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -#pragma once - -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include - -#include "common/compiler_util.h" // IWYU pragma: keep - -namespace doris { - -class CoreDataAllocator { -public: - virtual ~CoreDataAllocator() {} - virtual void* get_or_create(size_t id) = 0; -}; - -class CoreDataAllocatorFactory { -public: - CoreDataAllocatorFactory() {} - ~CoreDataAllocatorFactory(); - CoreDataAllocator* get_allocator(size_t cpu_id, size_t data_bytes); - static CoreDataAllocatorFactory* instance(); - -private: - DISALLOW_COPY_AND_ASSIGN(CoreDataAllocatorFactory); - -private: - std::mutex _lock; - std::map, CoreDataAllocator*> _allocators; -}; - -template -class CoreLocalValueController { -public: - CoreLocalValueController() { - int num_cpus = static_cast(std::thread::hardware_concurrency()); - _size = 8; - while (_size < num_cpus) { - _size <<= 1; - } - _allocators.resize(_size, nullptr); - for (int i = 0; i < _size; ++i) { - _allocators[i] = CoreDataAllocatorFactory::instance()->get_allocator(i, sizeof(T)); - } - } - - ~CoreLocalValueController() {} - - int get_id() { - std::lock_guard l(_lock); - int id = 0; - if (_free_ids.empty()) { - id = _next_id++; - } else { - id = _free_ids.back(); - _free_ids.pop_back(); - } - return id; - } - void reclaim_id(int id) { - std::lock_guard l(_lock); - _free_ids.push_back(id); - } - size_t size() const { return _size; } - CoreDataAllocator* allocator(int i) const { return _allocators[i]; } - - static CoreLocalValueController* instance() { - static CoreLocalValueController _s_instance; - return &_s_instance; - } - -private: - DISALLOW_COPY_AND_ASSIGN(CoreLocalValueController); - -private: - std::mutex _lock; - int _next_id = 0; - std::deque _free_ids; - std::vector _allocators; - size_t _size; -}; - -template -class CoreLocalValue { -public: - CoreLocalValue(const T init_value = T()) { - CoreLocalValueController* controller = CoreLocalValueController::instance(); - _id = 
controller->get_id(); - _size = controller->size(); - _values.resize(_size, nullptr); - for (int i = 0; i < _size; ++i) { - void* ptr = controller->allocator(i)->get_or_create(_id); - _values[i] = new (ptr) T(init_value); - } - } - - ~CoreLocalValue() { - for (int i = 0; i < _size; ++i) { - _values[i]->~T(); - } - CoreLocalValueController::instance()->reclaim_id(_id); - } - - size_t size() const { return _size; } - T* access() const { -#ifdef __APPLE__ - size_t cpu_id = 0; -#else - size_t cpu_id = sched_getcpu(); -#endif - if (cpu_id >= _size) { - cpu_id &= _size - 1; - } - return access_at_core(cpu_id); - } - T* access_at_core(size_t core_idx) const { return _values[core_idx]; } - - inline void reset() { - for (int i = 0; i < _size; ++i) { - _values[i]->~T(); - } - _values.clear(); - _values.resize(_size, nullptr); - CoreLocalValueController* controller = CoreLocalValueController::instance(); - for (int i = 0; i < _size; ++i) { - void* ptr = controller->allocator(i)->get_or_create(_id); - _values[i] = new (ptr) T(); - } - } - -private: - int _id = -1; - size_t _size = 0; - std::vector _values; -}; - -} // namespace doris diff --git a/be/src/util/doris_metrics.cpp b/be/src/util/doris_metrics.cpp index e9d4f31e5ca137..e77ee1c36b6b89 100644 --- a/be/src/util/doris_metrics.cpp +++ b/be/src/util/doris_metrics.cpp @@ -311,17 +311,17 @@ DorisMetrics::DorisMetrics() : _metric_registry(_s_registry_name) { INT_GAUGE_METRIC_REGISTER(_server_metric_entity, broker_file_open_reading); INT_GAUGE_METRIC_REGISTER(_server_metric_entity, local_file_open_writing); INT_GAUGE_METRIC_REGISTER(_server_metric_entity, s3_file_open_writing); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_total); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_cache); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_remote); - - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, query_ctx_cnt); - 
INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_ctx_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_cnt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_queued); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_running); - INT_ATOMIC_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_submit_failed); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_total); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_cache); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, num_io_bytes_read_from_remote); + + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, query_ctx_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_ctx_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_cnt); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_queued); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_running); + INT_COUNTER_METRIC_REGISTER(_server_metric_entity, scanner_task_submit_failed); } void DorisMetrics::initialize(bool init_system_metrics, const std::set& disk_devices, diff --git a/be/src/util/doris_metrics.h b/be/src/util/doris_metrics.h index 31b907eec9ed6c..d089758c21c93f 100644 --- a/be/src/util/doris_metrics.h +++ b/be/src/util/doris_metrics.h @@ -236,17 +236,17 @@ class DorisMetrics { UIntGauge* group_local_scan_thread_pool_queue_size = nullptr; UIntGauge* group_local_scan_thread_pool_thread_num = nullptr; - IntAtomicCounter* num_io_bytes_read_total = nullptr; - IntAtomicCounter* num_io_bytes_read_from_cache = nullptr; - IntAtomicCounter* num_io_bytes_read_from_remote = nullptr; - - IntAtomicCounter* query_ctx_cnt = nullptr; - IntAtomicCounter* scanner_ctx_cnt = nullptr; - IntAtomicCounter* scanner_cnt = nullptr; - 
IntAtomicCounter* scanner_task_cnt = nullptr; - IntAtomicCounter* scanner_task_queued = nullptr; - IntAtomicCounter* scanner_task_submit_failed = nullptr; - IntAtomicCounter* scanner_task_running = nullptr; + IntCounter* num_io_bytes_read_total = nullptr; + IntCounter* num_io_bytes_read_from_cache = nullptr; + IntCounter* num_io_bytes_read_from_remote = nullptr; + + IntCounter* query_ctx_cnt = nullptr; + IntCounter* scanner_ctx_cnt = nullptr; + IntCounter* scanner_cnt = nullptr; + IntCounter* scanner_task_cnt = nullptr; + IntCounter* scanner_task_queued = nullptr; + IntCounter* scanner_task_submit_failed = nullptr; + IntCounter* scanner_task_running = nullptr; static DorisMetrics* instance() { static DorisMetrics instance; diff --git a/be/src/util/metrics.h b/be/src/util/metrics.h index ac7e69a4ef8ab4..cb49884fefb60b 100644 --- a/be/src/util/metrics.h +++ b/be/src/util/metrics.h @@ -19,21 +19,17 @@ #include #include -#include -#include #include #include #include #include #include -#include #include #include #include #include -#include "util/core_local.h" #include "util/histogram.h" namespace doris { @@ -67,8 +63,8 @@ using Labels = std::unordered_map; class Metric { public: - Metric() {} - virtual ~Metric() {} + Metric() = default; + virtual ~Metric() = default; virtual std::string to_string() const = 0; virtual std::string to_prometheus(const std::string& display_name, const Labels& entity_labels, const Labels& metric_labels) const; @@ -83,7 +79,7 @@ template class AtomicMetric : public Metric { public: AtomicMetric() : _value(T()) {} - virtual ~AtomicMetric() {} + virtual ~AtomicMetric() = default; std::string to_string() const override { return std::to_string(value()); } @@ -101,81 +97,10 @@ class AtomicMetric : public Metric { std::atomic _value; }; -template -class LockSimpleMetric : public Metric { -public: - LockSimpleMetric() : _value(T()) {} - virtual ~LockSimpleMetric() {} - - std::string to_string() const override { return std::to_string(value()); } - - 
T value() const { - std::lock_guard l(_lock); - return _value; - } - - void increment(const T& delta) { - std::lock_guard l(this->_lock); - _value += delta; - } - - void set_value(const T& value) { - std::lock_guard l(this->_lock); - _value = value; - } - - rj::Value to_json_value(rj::Document::AllocatorType& allocator) const override { - return rj::Value(value()); - } - -protected: - // We use std::mutex instead of std::atomic is because atomic don't support - // double's fetch_add - // TODO(zc): If this is atomic is bottleneck, we change to thread local. - // performance: on Intel(R) Xeon(R) CPU E5-2450 int64_t - // original type: 2ns/op - // single thread std::mutex: 26ns/op - // multiple thread(8) std::mutex: 2500ns/op - mutable std::mutex _lock; - T _value; -}; - -template -class CoreLocalCounter : public Metric { -public: - CoreLocalCounter() {} - virtual ~CoreLocalCounter() {} - - std::string to_string() const override { - std::stringstream ss; - ss << value(); - return ss.str(); - } - - T value() const { - T sum = 0; - for (int i = 0; i < _value.size(); ++i) { - sum += *_value.access_at_core(i); - } - return sum; - } - - void increment(const T& delta) { __sync_fetch_and_add(_value.access(), delta); } - - void reset() { _value.reset(); } - - rj::Value to_json_value(rj::Document::AllocatorType& allocator) const override { - return rj::Value(value()); - } - -protected: - CoreLocalValue _value; -}; - class HistogramMetric : public Metric { public: - HistogramMetric() {} - virtual ~HistogramMetric() {} + HistogramMetric() = default; + virtual ~HistogramMetric() = default; HistogramMetric(const HistogramMetric&) = delete; HistogramMetric& operator=(const HistogramMetric&) = delete; @@ -208,41 +133,25 @@ class HistogramMetric : public Metric { template class AtomicCounter : public AtomicMetric { public: - AtomicCounter() {} - virtual ~AtomicCounter() {} + AtomicCounter() = default; + virtual ~AtomicCounter() = default; }; template class AtomicGauge : public 
AtomicMetric { public: AtomicGauge() : AtomicMetric() {} - virtual ~AtomicGauge() {} -}; - -template -class LockCounter : public LockSimpleMetric { -public: - LockCounter() : LockSimpleMetric() {} - virtual ~LockCounter() {} -}; - -// This can only used for trival type -template -class LockGauge : public LockSimpleMetric { -public: - LockGauge() : LockSimpleMetric() {} - virtual ~LockGauge() {} + virtual ~AtomicGauge() = default; }; -using IntCounter = CoreLocalCounter; -using IntAtomicCounter = AtomicCounter; -using UIntCounter = CoreLocalCounter; -using DoubleCounter = LockCounter; +using IntCounter = AtomicCounter; +using UIntCounter = AtomicCounter; +using DoubleCounter = AtomicCounter; using IntGauge = AtomicGauge; using UIntGauge = AtomicGauge; -using DoubleGauge = LockGauge; - +using DoubleGauge = AtomicGauge; using Labels = std::unordered_map; + struct MetricPrototype { public: MetricPrototype(MetricType type_, MetricUnit unit_, std::string name_, @@ -302,15 +211,12 @@ struct MetricPrototype { #define INT_GAUGE_METRIC_REGISTER(entity, metric) \ metric = (IntGauge*)(entity->register_metric(&METRIC_##metric)) -#define INT_DOUBLE_METRIC_REGISTER(entity, metric) \ +#define DOUBLE_GAUGE_METRIC_REGISTER(entity, metric) \ metric = (DoubleGauge*)(entity->register_metric(&METRIC_##metric)) #define INT_UGAUGE_METRIC_REGISTER(entity, metric) \ metric = (UIntGauge*)(entity->register_metric(&METRIC_##metric)) -#define INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, metric) \ - metric = (IntAtomicCounter*)(entity->register_metric(&METRIC_##metric)) - #define HISTOGRAM_METRIC_REGISTER(entity, metric) \ metric = (HistogramMetric*)(entity->register_metric(&METRIC_##metric)) @@ -338,8 +244,8 @@ enum class MetricEntityType { kServer, kTablet }; class MetricEntity { public: - MetricEntity(MetricEntityType type, const std::string& name, const Labels& labels) - : _type(type), _name(name), _labels(labels) {} + MetricEntity(MetricEntityType type, std::string name, Labels labels) + : 
_type(type), _name(std::move(name)), _labels(std::move(labels)) {} ~MetricEntity() { for (auto& metric : _metrics) { delete metric.second; @@ -401,7 +307,7 @@ using EntityMetricsByType = class MetricRegistry { public: - MetricRegistry(const std::string& name) : _name(name) {} + MetricRegistry(std::string name) : _name(std::move(name)) {} ~MetricRegistry(); std::shared_ptr register_entity( diff --git a/be/src/util/system_metrics.cpp b/be/src/util/system_metrics.cpp index fc2cdcc9262b31..ecbb4d580360c4 100644 --- a/be/src/util/system_metrics.cpp +++ b/be/src/util/system_metrics.cpp @@ -44,12 +44,12 @@ DEFINE_COUNTER_METRIC_PROTOTYPE_2ARG(avail_cpu_num, MetricUnit::NOUNIT); DEFINE_COUNTER_METRIC_PROTOTYPE_2ARG(host_cpu_num, MetricUnit::NOUNIT); struct CpuNumberMetrics { CpuNumberMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, host_cpu_num); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, avail_cpu_num); + INT_COUNTER_METRIC_REGISTER(entity, host_cpu_num); + INT_COUNTER_METRIC_REGISTER(entity, avail_cpu_num); } - IntAtomicCounter* host_cpu_num {nullptr}; - IntAtomicCounter* avail_cpu_num {nullptr}; + IntCounter* host_cpu_num {nullptr}; + IntCounter* avail_cpu_num {nullptr}; MetricEntity* entity = nullptr; }; @@ -70,16 +70,16 @@ DEFINE_CPU_COUNTER_METRIC(guest_nice); // /proc/stat: http://www.linuxhowtos.org/System/procstat.htm struct CpuMetrics { CpuMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_user); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_nice); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_system); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_idle); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_iowait); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_irq); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_soft_irq); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_steal); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_guest); - 
INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, cpu_guest_nice); + INT_COUNTER_METRIC_REGISTER(entity, cpu_user); + INT_COUNTER_METRIC_REGISTER(entity, cpu_nice); + INT_COUNTER_METRIC_REGISTER(entity, cpu_system); + INT_COUNTER_METRIC_REGISTER(entity, cpu_idle); + INT_COUNTER_METRIC_REGISTER(entity, cpu_iowait); + INT_COUNTER_METRIC_REGISTER(entity, cpu_irq); + INT_COUNTER_METRIC_REGISTER(entity, cpu_soft_irq); + INT_COUNTER_METRIC_REGISTER(entity, cpu_steal); + INT_COUNTER_METRIC_REGISTER(entity, cpu_guest); + INT_COUNTER_METRIC_REGISTER(entity, cpu_guest_nice); metrics[0] = cpu_user; metrics[1] = cpu_nice; @@ -96,18 +96,18 @@ struct CpuMetrics { static constexpr int cpu_num_metrics = 10; MetricEntity* entity = nullptr; - IntAtomicCounter* cpu_user; - IntAtomicCounter* cpu_nice; - IntAtomicCounter* cpu_system; - IntAtomicCounter* cpu_idle; - IntAtomicCounter* cpu_iowait; - IntAtomicCounter* cpu_irq; - IntAtomicCounter* cpu_soft_irq; - IntAtomicCounter* cpu_steal; - IntAtomicCounter* cpu_guest; - IntAtomicCounter* cpu_guest_nice; - - IntAtomicCounter* metrics[cpu_num_metrics]; + IntCounter* cpu_user; + IntCounter* cpu_nice; + IntCounter* cpu_system; + IntCounter* cpu_idle; + IntCounter* cpu_iowait; + IntCounter* cpu_irq; + IntCounter* cpu_soft_irq; + IntCounter* cpu_steal; + IntCounter* cpu_guest; + IntCounter* cpu_guest_nice; + + IntCounter* metrics[cpu_num_metrics]; }; #define DEFINE_MEMORY_GAUGE_METRIC(metric, unit) \ @@ -216,25 +216,25 @@ DEFINE_DISK_COUNTER_METRIC(io_time_weigthed, MetricUnit::MILLISECONDS); struct DiskMetrics { DiskMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_reads_completed); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_bytes_read); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_read_time_ms); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_writes_completed); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_bytes_written); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_write_time_ms); 
- INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_io_time_ms); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, disk_io_time_weigthed); + INT_COUNTER_METRIC_REGISTER(entity, disk_reads_completed); + INT_COUNTER_METRIC_REGISTER(entity, disk_bytes_read); + INT_COUNTER_METRIC_REGISTER(entity, disk_read_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, disk_writes_completed); + INT_COUNTER_METRIC_REGISTER(entity, disk_bytes_written); + INT_COUNTER_METRIC_REGISTER(entity, disk_write_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, disk_io_time_ms); + INT_COUNTER_METRIC_REGISTER(entity, disk_io_time_weigthed); } MetricEntity* entity = nullptr; - IntAtomicCounter* disk_reads_completed; - IntAtomicCounter* disk_bytes_read; - IntAtomicCounter* disk_read_time_ms; - IntAtomicCounter* disk_writes_completed; - IntAtomicCounter* disk_bytes_written; - IntAtomicCounter* disk_write_time_ms; - IntAtomicCounter* disk_io_time_ms; - IntAtomicCounter* disk_io_time_weigthed; + IntCounter* disk_reads_completed; + IntCounter* disk_bytes_read; + IntCounter* disk_read_time_ms; + IntCounter* disk_writes_completed; + IntCounter* disk_bytes_written; + IntCounter* disk_write_time_ms; + IntCounter* disk_io_time_ms; + IntCounter* disk_io_time_weigthed; }; #define DEFINE_NETWORK_COUNTER_METRIC(metric, unit) \ @@ -246,17 +246,17 @@ DEFINE_NETWORK_COUNTER_METRIC(send_packets, MetricUnit::PACKETS); struct NetworkMetrics { NetworkMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_receive_bytes); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_receive_packets); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_send_bytes); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, network_send_packets); + INT_COUNTER_METRIC_REGISTER(entity, network_receive_bytes); + INT_COUNTER_METRIC_REGISTER(entity, network_receive_packets); + INT_COUNTER_METRIC_REGISTER(entity, network_send_bytes); + INT_COUNTER_METRIC_REGISTER(entity, network_send_packets); } MetricEntity* entity = 
nullptr; - IntAtomicCounter* network_receive_bytes; - IntAtomicCounter* network_receive_packets; - IntAtomicCounter* network_send_bytes; - IntAtomicCounter* network_send_packets; + IntCounter* network_receive_bytes; + IntCounter* network_receive_packets; + IntCounter* network_send_bytes; + IntCounter* network_send_packets; }; #define DEFINE_SNMP_COUNTER_METRIC(metric, unit, desc) \ @@ -270,17 +270,17 @@ DEFINE_SNMP_COUNTER_METRIC(tcp_out_segs, MetricUnit::NOUNIT, "All send TCP packe // metrics read from /proc/net/snmp struct SnmpMetrics { SnmpMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_errs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_retrans_segs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_segs); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, snmp_tcp_out_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_errs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_retrans_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_in_segs); + INT_COUNTER_METRIC_REGISTER(entity, snmp_tcp_out_segs); } MetricEntity* entity = nullptr; - IntAtomicCounter* snmp_tcp_in_errs; - IntAtomicCounter* snmp_tcp_retrans_segs; - IntAtomicCounter* snmp_tcp_in_segs; - IntAtomicCounter* snmp_tcp_out_segs; + IntCounter* snmp_tcp_in_errs; + IntCounter* snmp_tcp_retrans_segs; + IntCounter* snmp_tcp_in_segs; + IntCounter* snmp_tcp_out_segs; }; #define DEFINE_FD_COUNTER_METRIC(metric, unit) \ @@ -308,9 +308,9 @@ DEFINE_LOAD_AVERAGE_DOUBLE_METRIC(15_minutes); struct LoadAverageMetrics { LoadAverageMetrics(MetricEntity* ent) : entity(ent) { - INT_DOUBLE_METRIC_REGISTER(entity, load_average_1_minutes); - INT_DOUBLE_METRIC_REGISTER(entity, load_average_5_minutes); - INT_DOUBLE_METRIC_REGISTER(entity, load_average_15_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_1_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_5_minutes); + DOUBLE_GAUGE_METRIC_REGISTER(entity, load_average_15_minutes); } 
MetricEntity* entity = nullptr; @@ -329,18 +329,18 @@ DEFINE_PROC_STAT_COUNTER_METRIC(procs_blocked); struct ProcMetrics { ProcMetrics(MetricEntity* ent) : entity(ent) { - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_interrupt); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_ctxt_switch); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_procs_running); - INT_ATOMIC_COUNTER_METRIC_REGISTER(entity, proc_procs_blocked); + INT_COUNTER_METRIC_REGISTER(entity, proc_interrupt); + INT_COUNTER_METRIC_REGISTER(entity, proc_ctxt_switch); + INT_COUNTER_METRIC_REGISTER(entity, proc_procs_running); + INT_COUNTER_METRIC_REGISTER(entity, proc_procs_blocked); } MetricEntity* entity = nullptr; - IntAtomicCounter* proc_interrupt; - IntAtomicCounter* proc_ctxt_switch; - IntAtomicCounter* proc_procs_running; - IntAtomicCounter* proc_procs_blocked; + IntCounter* proc_interrupt; + IntCounter* proc_ctxt_switch; + IntCounter* proc_procs_running; + IntCounter* proc_procs_blocked; }; DEFINE_GAUGE_CORE_METRIC_PROTOTYPE_2ARG(max_disk_io_util_percent, MetricUnit::PERCENT); diff --git a/be/test/util/core_local_test.cpp b/be/test/util/core_local_test.cpp deleted file mode 100644 index ed87015b189e1c..00000000000000 --- a/be/test/util/core_local_test.cpp +++ /dev/null @@ -1,122 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -#include "util/core_local.h" - -#include -#include -#include -#include - -#include -#include - -#include "common/logging.h" -#include "gtest/gtest_pred_impl.h" -#include "testutil/test_util.h" -#include "util/stopwatch.hpp" - -namespace doris { - -// Fixture for testing class Decompressor -class CoreLocalTest : public ::testing::Test { -protected: - CoreLocalTest() {} - ~CoreLocalTest() {} -}; - -void updater(int64_t loop, CoreLocalValue* value, int64_t* used_ns) { - usleep(100); - MonotonicStopWatch stopwatch; - stopwatch.start(); - for (int i = 0; i < loop; ++i) { - __sync_fetch_and_add(value->access(), 1); - } - *used_ns = stopwatch.elapsed_time(); -} - -TEST_F(CoreLocalTest, CoreLocalValue) { - int64_t loop = LOOP_LESS_OR_MORE(1000, 1000000L); - CoreLocalValue value; - std::vector used_ns; - used_ns.resize(8); - std::vector workers; - for (int i = 0; i < 8; ++i) { - workers.emplace_back(updater, loop, &value, &used_ns[i]); - } - int64_t sum_ns = 0; - for (int i = 0; i < 8; ++i) { - workers[i].join(); - sum_ns += used_ns[i]; - } - int64_t sum = 0; - for (int i = 0; i < value.size(); ++i) { - sum += __sync_fetch_and_add(value.access_at_core(i), 0); - } - EXPECT_EQ(8 * loop, sum); - LOG(INFO) << "time:" << sum_ns / sum << "ns/op"; -} - -TEST_F(CoreLocalTest, CoreDataAllocator) { - CoreDataAllocatorFactory factory; - auto allocator1 = factory.get_allocator(1, 8); - auto ptr = allocator1->get_or_create(0); - EXPECT_TRUE(ptr != nullptr); - { - auto ptr2 = allocator1->get_or_create(0); - EXPECT_TRUE(ptr == ptr2); - } - { - auto ptr2 = allocator1->get_or_create(4096); - EXPECT_TRUE(ptr2 != nullptr); - } - { - auto allocator2 = factory.get_allocator(2, 8); - EXPECT_TRUE(allocator2 != allocator1); - } -} - -TEST_F(CoreLocalTest, CoreLocalValueController) { - CoreLocalValueController controller; - auto id = controller.get_id(); - EXPECT_EQ(0, id); - 
controller.reclaim_id(id); - id = controller.get_id(); - EXPECT_EQ(0, id); - id = controller.get_id(); - EXPECT_EQ(1, id); -} - -TEST_F(CoreLocalTest, CoreLocalValueNormal) { - CoreLocalValue value; - for (int i = 0; i < value.size(); ++i) { - EXPECT_EQ(0, *value.access_at_core(i)); - *value.access_at_core(i) += 1; - } - for (int i = 0; i < value.size(); ++i) { - EXPECT_EQ(1, *value.access_at_core(i)); - } - for (int i = 0; i < 10000; ++i) { - *value.access() += 1; - } - int64_t sum = 0; - for (int i = 0; i < value.size(); ++i) { - sum += *value.access_at_core(i); - } - EXPECT_EQ(10000 + value.size(), sum); -} -} // namespace doris diff --git a/be/test/util/doris_metrics_test.cpp b/be/test/util/doris_metrics_test.cpp index dcba57cb7e9ff2..6e9969b1210345 100644 --- a/be/test/util/doris_metrics_test.cpp +++ b/be/test/util/doris_metrics_test.cpp @@ -34,14 +34,14 @@ TEST_F(DorisMetricsTest, Normal) { auto server_entity = DorisMetrics::instance()->server_entity(); // check metric { - DorisMetrics::instance()->fragment_requests_total->reset(); + DorisMetrics::instance()->fragment_requests_total->set_value(0); DorisMetrics::instance()->fragment_requests_total->increment(12); auto metric = server_entity->get_metric("fragment_requests_total"); EXPECT_TRUE(metric != nullptr); EXPECT_STREQ("12", metric->to_string().c_str()); } { - DorisMetrics::instance()->fragment_request_duration_us->reset(); + DorisMetrics::instance()->fragment_request_duration_us->set_value(0); DorisMetrics::instance()->fragment_request_duration_us->increment(101); auto metric = server_entity->get_metric("fragment_request_duration_us"); EXPECT_TRUE(metric != nullptr); @@ -92,7 +92,7 @@ TEST_F(DorisMetricsTest, Normal) { } // engine request { - DorisMetrics::instance()->create_tablet_requests_total->reset(); + DorisMetrics::instance()->create_tablet_requests_total->set_value(0); DorisMetrics::instance()->create_tablet_requests_total->increment(15); auto metric = 
server_entity->get_metric("create_tablet_requests_total", "engine_requests_total"); @@ -100,7 +100,7 @@ TEST_F(DorisMetricsTest, Normal) { EXPECT_STREQ("15", metric->to_string().c_str()); } { - DorisMetrics::instance()->drop_tablet_requests_total->reset(); + DorisMetrics::instance()->drop_tablet_requests_total->set_value(0); DorisMetrics::instance()->drop_tablet_requests_total->increment(16); auto metric = server_entity->get_metric("drop_tablet_requests_total", "engine_requests_total"); @@ -129,7 +129,7 @@ TEST_F(DorisMetricsTest, Normal) { EXPECT_STREQ("20", metric->to_string().c_str()); } { - DorisMetrics::instance()->storage_migrate_requests_total->reset(); + DorisMetrics::instance()->storage_migrate_requests_total->set_value(0); DorisMetrics::instance()->storage_migrate_requests_total->increment(21); auto metric = server_entity->get_metric("storage_migrate_requests_total", "engine_requests_total"); diff --git a/be/test/util/metrics_test.cpp b/be/test/util/metrics_test.cpp index 305d17c47ca06f..1703b5b42bd7b4 100644 --- a/be/test/util/metrics_test.cpp +++ b/be/test/util/metrics_test.cpp @@ -46,7 +46,7 @@ TEST_F(MetricsTest, Counter) { EXPECT_STREQ("100", counter.to_string().c_str()); } { - IntAtomicCounter counter; + IntCounter counter; EXPECT_EQ(0, counter.value()); counter.increment(100); EXPECT_EQ(100, counter.value()); @@ -99,7 +99,7 @@ TEST_F(MetricsTest, CounterPerf) { } // IntAtomicCounter { - IntAtomicCounter counter; + IntCounter counter; MonotonicStopWatch watch; watch.start(); for (int i = 0; i < kLoopCount; ++i) { @@ -141,11 +141,11 @@ TEST_F(MetricsTest, CounterPerf) { } // multi-thread for IntAtomicCounter { - IntAtomicCounter mt_counter; + IntCounter mt_counter; std::vector updaters; std::atomic used_time(0); for (int i = 0; i < 8; ++i) { - updaters.emplace_back(&mt_updater, kThreadLoopCount, &mt_counter, + updaters.emplace_back(&mt_updater, kThreadLoopCount, &mt_counter, &used_time); } for (int i = 0; i < 8; ++i) { From 
1853d1591b4b5af0b1fad27ee6380cc94e3c00cd Mon Sep 17 00:00:00 2001 From: Yongqiang YANG Date: Mon, 23 Dec 2024 17:31:26 +0800 Subject: [PATCH 59/82] [chore](log) remove useless resource id log on write path (#45618) --- be/src/cloud/cloud_storage_engine.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/be/src/cloud/cloud_storage_engine.h b/be/src/cloud/cloud_storage_engine.h index 072b8366542253..2cd47c52dbeb62 100644 --- a/be/src/cloud/cloud_storage_engine.h +++ b/be/src/cloud/cloud_storage_engine.h @@ -75,7 +75,7 @@ class CloudStorageEngine final : public BaseStorageEngine { void _check_file_cache_ttl_block_valid(); std::optional get_storage_resource(const std::string& vault_id) { - LOG(INFO) << "Getting storage resource for vault_id: " << vault_id; + VLOG_DEBUG << "Getting storage resource for vault_id: " << vault_id; bool synced = false; do { From 81f3c4841abfc1d69b0fbf96c6521e38b9777085 Mon Sep 17 00:00:00 2001 From: 924060929 Date: Mon, 23 Dec 2024 17:50:07 +0800 Subject: [PATCH 60/82] [enhancement](nereids) improve lots of values in `insert into values` statement (#40202) improve lots of values in `insert into values` statement by bypass NereidsPlanner the main logic is 1. `InsertUtils.normalizePlan` use `FoldConstantRuleOnFE` to reduce the expression, e.g. `values(date(now())` 2. `FastInsertIntoValuesPlanner` skip most of rules to analyze and rewrite `LogicalInlineTable` to `LogicalUnion` or `LogicalOneRowRelation` 3. fast parse date time string without date format 4. getHintMap and normal lexer share the same tokens 5. 
`set enable_fast_analyze_into_values=false` can force to execute all optimize rules, when we meet some bugs in `FastInsertIntoValuesPlanner` test: insert 1000 rows with 1000 columns, the columns contains int, bigint, decimal(26,7), date, datetime, varchar(10 chinese chars) +---------------------------------+------------------------------------------------------+--------------------------+--------------------------+ |FastInsertIntoValuesPlanner |NereidsPlanner(enable_fast_analyze_into_values=false) |Legacy optimizer in 2.1.6 | Nereids planner in 2.1.6 | +---------------------------------+------------------------------------------------------+--------------------------+--------------------------+ |16s(bottleneck is antlr's lexer) |32s |16s |80s | +---------------------------------+------------------------------------------------------+--------------------------+--------------------------+ If you use FastInsertIntoValuesPlanner with group commit in a transaction, the time can reduce to 12s. TODO: build a custom lexer. 
in my hand write lexer test, FastInsertIntoValuesPlanner without group commit can reduce 16s to 12s, but it will take more effort: RegularExpression -> NFA -> DFA -> minimal DFA -> Lexer codegen --- be/src/http/http_channel.cpp | 3 +- .../org/apache/doris/nereids/DorisParser.g4 | 6 +- .../apache/doris/nereids/CascadesContext.java | 9 +- .../apache/doris/nereids/NereidsPlanner.java | 19 +- .../nereids/analyzer/UnboundInlineTable.java | 87 ++++++++ .../doris/nereids/analyzer/UnboundPlan.java | 39 ++++ .../nereids/analyzer/UnboundTableSink.java | 9 + .../translator/PhysicalPlanTranslator.java | 26 ++- .../nereids/parser/LogicalPlanBuilder.java | 78 +++++--- .../doris/nereids/parser/NereidsParser.java | 49 +++-- .../doris/nereids/pattern/PlanPatterns.java | 8 + .../apache/doris/nereids/rules/RuleType.java | 4 +- .../rules/analysis/BindExpression.java | 22 +- .../nereids/rules/analysis/BindSink.java | 4 +- .../rules/expression/ExpressionRewrite.java | 50 +++-- .../expression/rules/ConvertAggStateCast.java | 3 +- .../rules/FoldConstantRuleOnFE.java | 9 + .../nereids/stats/ExpressionEstimation.java | 5 +- .../expressions/literal/DateLiteral.java | 49 ++++- .../expressions/literal/DateTimeLiteral.java | 2 +- .../nereids/trees/plans/Explainable.java | 9 + .../doris/nereids/trees/plans/PlanType.java | 1 + .../trees/plans/algebra/InlineTable.java | 28 +++ .../trees/plans/commands/ExplainCommand.java | 8 +- .../insert/BatchInsertIntoTableCommand.java | 54 ++++- .../insert/FastInsertIntoValuesPlanner.java | 166 +++++++++++++++ .../insert/InsertIntoTableCommand.java | 54 +++-- .../insert/InsertIntoValuesAnalyzer.java | 156 +++++++++++++++ .../insert/InsertOverwriteTableCommand.java | 59 ++++-- .../plans/commands/insert/InsertUtils.java | 189 +++++++++++++++--- .../insert/OlapGroupCommitInsertExecutor.java | 8 +- .../plans/logical/LogicalInlineTable.java | 54 ++++- .../plans/logical/LogicalOneRowRelation.java | 2 +- .../plans/logical/LogicalSetOperation.java | 3 +- 
.../trees/plans/logical/LogicalUnion.java | 81 ++++++++ .../plans/physical/PhysicalOlapTableSink.java | 2 +- .../trees/plans/visitor/PlanVisitor.java | 6 + .../doris/nereids/util/TypeCoercionUtils.java | 3 +- .../org/apache/doris/qe/AuditLogHelper.java | 6 +- .../org/apache/doris/qe/SessionVariable.java | 21 +- .../SimplifyComparisonPredicateSqlTest.java | 27 ++- .../trees/expressions/SelectReplaceTest.java | 8 +- .../expressions/literal/DateLiteralTest.java | 7 +- .../suites/compression_p0/load.groovy | 2 +- ...test_nestedtypes_insert_into_select.groovy | 4 +- regression-test/suites/index_p0/load.groovy | 2 +- .../insert_group_commit_with_exception.groovy | 2 +- .../dimension/dimension_2_inner_join.groovy | 2 +- 48 files changed, 1217 insertions(+), 228 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java diff --git a/be/src/http/http_channel.cpp b/be/src/http/http_channel.cpp index 312f1ab9286909..598330ff7cbcfb 100644 --- a/be/src/http/http_channel.cpp +++ b/be/src/http/http_channel.cpp @@ -123,7 +123,8 @@ void HttpChannel::send_files(HttpRequest* request, const std::string& root_dir, VLOG_DEBUG << "http channel send file " << file_path << ", size: " << file_size; evbuffer_add_printf(evb.get(), "File-Name: %s\r\n", file.c_str()); - evbuffer_add_printf(evb.get(), "Content-Length: %ld\r\n", file_size); + evbuffer_add_printf(evb.get(), "Content-Length: %" PRIi64 "\r\n", file_size); + evbuffer_add_printf(evb.get(), "\r\n"); if (file_size > 0) { 
evbuffer_add_file(evb.get(), fd, 0, file_size); diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 368847bac5f270..37e1c68cefb91c 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -1476,7 +1476,9 @@ rowConstructor ; rowConstructorItem - : namedExpression | DEFAULT + : constant // duplicate constant rule for improve the parse of `insert into tbl values` + | DEFAULT + | namedExpression ; predicate @@ -1678,7 +1680,7 @@ constant | LEFT_BRACE (items+=constant COLON items+=constant)? (COMMA items+=constant COLON items+=constant)* RIGHT_BRACE #mapLiteral | LEFT_BRACE items+=constant (COMMA items+=constant)* RIGHT_BRACE #structLiteral - | PLACEHOLDER #placeholder + | PLACEHOLDER #placeholder ; comparisonOperator diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java index 258704763909f1..4f81dde82d97f2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/CascadesContext.java @@ -473,9 +473,16 @@ public void setCurrentRootRewriteJobContext(RootRewriteJobContext currentRootRew this.currentRootRewriteJobContext = Optional.ofNullable(currentRootRewriteJobContext); } + /** showPlanProcess */ public boolean showPlanProcess() { Boolean show = showPlanProcess.get(); - return show != null && show; + if (show != null && show) { + return true; + } + if (parent.isPresent()) { + return parent.get().showPlanProcess(); + } + return false; } /** set showPlanProcess in task scope */ diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java index 4eafa0e2172f96..6b1c1dd6734435 100644 --- 
a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java @@ -98,17 +98,19 @@ */ public class NereidsPlanner extends Planner { public static final Logger LOG = LogManager.getLogger(NereidsPlanner.class); + + protected Plan parsedPlan; + protected Plan analyzedPlan; + protected Plan rewrittenPlan; + protected Plan optimizedPlan; + protected PhysicalPlan physicalPlan; + private CascadesContext cascadesContext; private final StatementContext statementContext; private final List scanNodeList = Lists.newArrayList(); private final List physicalRelations = Lists.newArrayList(); private DescriptorTable descTable; - private Plan parsedPlan; - private Plan analyzedPlan; - private Plan rewrittenPlan; - private Plan optimizedPlan; - private PhysicalPlan physicalPlan; private FragmentIdMapping distributedPlans; // The cost of optimized plan private double cost = 0; @@ -552,7 +554,7 @@ public Group getRoot() { return cascadesContext.getMemo().getRoot(); } - private PhysicalPlan chooseNthPlan(Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { + protected PhysicalPlan chooseNthPlan(Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { if (nthPlan <= 1) { cost = rootGroup.getLowestCostPlan(physicalProperties).orElseThrow( () -> new AnalysisException("lowestCostPlans with physicalProperties(" @@ -605,6 +607,9 @@ private PhysicalPlan chooseBestPlan(Group rootGroup, PhysicalProperties physical } private long getGarbageCollectionTime() { + if (!ConnectContext.get().getSessionVariable().enableProfile()) { + return 0; + } List gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans(); long initialGCTime = 0; for (GarbageCollectorMXBean gcBean : gcMxBeans) { @@ -881,7 +886,7 @@ private boolean showPlanProcess(ExplainOptions explainOptions) { return explainOptions != null && explainOptions.showPlanProcess(); } - private void keepOrShowPlanProcess(boolean 
showPlanProcess, Runnable task) { + protected void keepOrShowPlanProcess(boolean showPlanProcess, Runnable task) { if (showPlanProcess) { cascadesContext.withPlanProcess(showPlanProcess, task); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java new file mode 100644 index 00000000000000..42d637d676fae2 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundInlineTable.java @@ -0,0 +1,87 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.analyzer; + +import org.apache.doris.nereids.exceptions.UnboundException; +import org.apache.doris.nereids.memo.GroupExpression; +import org.apache.doris.nereids.properties.LogicalProperties; +import org.apache.doris.nereids.trees.expressions.Expression; +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; +import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; +import org.apache.doris.nereids.trees.plans.logical.LogicalLeaf; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.util.Utils; + +import com.google.common.collect.ImmutableList; + +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +/** UnboundInlineTable */ +public class UnboundInlineTable extends LogicalLeaf implements InlineTable, BlockFuncDepsPropagation, UnboundPlan { + private final List> constantExprsList; + + public UnboundInlineTable(List> constantExprsList) { + super(PlanType.LOGICAL_UNBOUND_INLINE_TABLE, Optional.empty(), Optional.empty()); + this.constantExprsList = Utils.fastToImmutableList( + Objects.requireNonNull(constantExprsList, "constantExprsList can not be null") + ); + } + + public List> getConstantExprsList() { + return constantExprsList; + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitUnboundInlineTable(this, context); + } + + @Override + public List getExpressions() { + ImmutableList.Builder expressions = ImmutableList.builderWithExpectedSize( + constantExprsList.size() * constantExprsList.get(0).size()); + + for (List namedExpressions : constantExprsList) { + expressions.addAll(namedExpressions); + } + + return expressions.build(); + } + + @Override + public Plan 
withGroupExpression(Optional groupExpression) { + return this; + } + + @Override + public Plan withGroupExprLogicalPropChildren(Optional groupExpression, + Optional logicalProperties, List children) { + return this; + } + + @Override + public List computeOutput() { + throw new UnboundException("output"); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java new file mode 100644 index 00000000000000..2b743f958aaa02 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundPlan.java @@ -0,0 +1,39 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.analyzer; + +import org.apache.doris.nereids.exceptions.UnboundException; +import org.apache.doris.nereids.properties.LogicalProperties; +import org.apache.doris.nereids.properties.UnboundLogicalProperties; +import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.plans.Plan; + +import java.util.List; + +/** UnboundPlan */ +public interface UnboundPlan extends Plan { + @Override + default LogicalProperties computeLogicalProperties() { + return UnboundLogicalProperties.INSTANCE; + } + + @Override + default List computeOutput() { + throw new UnboundException("output"); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java index 0e528227dc9742..8cf32648d55f05 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/analyzer/UnboundTableSink.java @@ -34,6 +34,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import org.apache.commons.lang3.StringUtils; import java.util.List; import java.util.Objects; @@ -176,4 +177,12 @@ public LogicalProperties computeLogicalProperties() { public List computeOutput() { throw new UnboundException("output"); } + + @Override + public String toString() { + return Utils.toSqlString("UnboundTableSink", + "nameParts", StringUtils.join(nameParts, "."), + "colNames", colNames, + "hints", hints); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java index f0fa59977a1902..85243c4b545420 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/glue/translator/PhysicalPlanTranslator.java @@ -207,6 +207,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; @@ -2058,17 +2059,22 @@ public PlanFragment visitPhysicalSetOperation( } setOperationNode.setNereidsId(setOperation.getId()); - setOperation.getRegularChildrenOutputs().stream() - .map(o -> o.stream() - .map(e -> ExpressionTranslator.translate(e, context)) - .collect(ImmutableList.toImmutableList())) - .forEach(setOperationNode::addResultExprLists); + for (List regularChildrenOutput : setOperation.getRegularChildrenOutputs()) { + Builder translateOutputs = ImmutableList.builderWithExpectedSize(regularChildrenOutput.size()); + for (SlotReference childOutput : regularChildrenOutput) { + translateOutputs.add(ExpressionTranslator.translate(childOutput, context)); + } + setOperationNode.addResultExprLists(translateOutputs.build()); + } + if (setOperation instanceof PhysicalUnion) { - ((PhysicalUnion) setOperation).getConstantExprsList().stream() - .map(l -> l.stream() - .map(e -> ExpressionTranslator.translate(e, context)) - .collect(ImmutableList.toImmutableList())) - .forEach(setOperationNode::addConstExprList); + for (List unionConsts : ((PhysicalUnion) setOperation).getConstantExprsList()) { + Builder translateConsts = ImmutableList.builderWithExpectedSize(unionConsts.size()); + for (NamedExpression unionConst : unionConsts) { + translateConsts.add(ExpressionTranslator.translate(unionConst, context)); + } + setOperationNode.addConstExprList(translateConsts.build()); + } } for (PlanFragment childFragment : childrenFragments) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 7bc328e238d99d..d98d0660f5c9cb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -335,7 +335,7 @@ import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundAlias; import org.apache.doris.nereids.analyzer.UnboundFunction; -import org.apache.doris.nereids.analyzer.UnboundOneRowRelation; +import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.analyzer.UnboundResultSink; import org.apache.doris.nereids.analyzer.UnboundSlot; @@ -357,6 +357,7 @@ import org.apache.doris.nereids.properties.SelectHintUseMv; import org.apache.doris.nereids.trees.TableSample; import org.apache.doris.nereids.trees.expressions.Add; +import org.apache.doris.nereids.trees.expressions.Alias; import org.apache.doris.nereids.trees.expressions.And; import org.apache.doris.nereids.trees.expressions.BitAnd; import org.apache.doris.nereids.trees.expressions.BitNot; @@ -489,6 +490,8 @@ import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; import org.apache.doris.nereids.trees.plans.algebra.Aggregate; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; +import org.apache.doris.nereids.trees.plans.algebra.OneRowRelation; import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; import org.apache.doris.nereids.trees.plans.commands.AddConstraintCommand; import org.apache.doris.nereids.trees.plans.commands.AdminCancelRebalanceDiskCommand; @@ -692,10 +695,10 @@ import org.apache.doris.nereids.trees.plans.logical.LogicalFilter; import org.apache.doris.nereids.trees.plans.logical.LogicalGenerate; import org.apache.doris.nereids.trees.plans.logical.LogicalHaving; -import 
org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalIntersect; import org.apache.doris.nereids.trees.plans.logical.LogicalJoin; import org.apache.doris.nereids.trees.plans.logical.LogicalLimit; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalProject; import org.apache.doris.nereids.trees.plans.logical.LogicalQualify; @@ -887,7 +890,7 @@ public LogicalPlan visitInsertTable(InsertTableContext ctx) { } else { throw new ParseException("tableName and tableId cannot both be null"); } - Optional labelName = ctx.labelName == null ? Optional.empty() : Optional.of(ctx.labelName.getText()); + Optional labelName = (ctx.labelName == null) ? Optional.empty() : Optional.of(ctx.labelName.getText()); List colNames = ctx.cols == null ? ImmutableList.of() : visitIdentifierList(ctx.cols); // TODO visit partitionSpecCtx LogicalPlan plan = visitQuery(ctx.query()); @@ -918,7 +921,7 @@ public LogicalPlan visitInsertTable(InsertTableContext ctx) { command = new InsertOverwriteTableCommand(sink, labelName, cte); } else { if (ConnectContext.get() != null && ConnectContext.get().isTxnModel() - && sink.child() instanceof LogicalInlineTable + && sink.child() instanceof InlineTable && sink.child().getExpressions().stream().allMatch(Expression::isConstant)) { // FIXME: In legacy, the `insert into select 1` is handled as `insert into values`. // In nereids, the original way is throw an AnalysisException and fallback to legacy. 
@@ -1848,8 +1851,8 @@ public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationConte LogicalPlan selectPlan; LogicalPlan relation; if (ctx.fromClause() == null) { - relation = new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), - ImmutableList.of(new UnboundAlias(Literal.of(0)))); + relation = new LogicalOneRowRelation(StatementScopeIdGenerator.newRelationId(), + ImmutableList.of(new Alias(Literal.of(0)))); } else { relation = visitFromClause(ctx.fromClause()); } @@ -1879,10 +1882,13 @@ public LogicalPlan visitRegularQuerySpecification(RegularQuerySpecificationConte @Override public LogicalPlan visitInlineTable(InlineTableContext ctx) { - List> values = ctx.rowConstructor().stream() - .map(this::visitRowConstructor) - .collect(ImmutableList.toImmutableList()); - return new LogicalInlineTable(values); + List rowConstructorContexts = ctx.rowConstructor(); + ImmutableList.Builder> rows + = ImmutableList.builderWithExpectedSize(rowConstructorContexts.size()); + for (RowConstructorContext rowConstructorContext : rowConstructorContexts) { + rows.add(visitRowConstructor(rowConstructorContext)); + } + return new UnboundInlineTable(rows.build()); } /** @@ -2049,18 +2055,22 @@ public Expression visitStar(StarContext ctx) { throw new ParseException("only one replace clause is supported", ctx); } ReplaceContext replaceContext = (ReplaceContext) exceptOrReplace; - List expectAlias = getNamedExpressions(replaceContext.namedExpressionSeq()); - boolean allAlias = expectAlias.stream() - .allMatch(e -> e instanceof UnboundAlias - && ((UnboundAlias) e).getAlias().isPresent()); - if (expectAlias.isEmpty() || !allAlias) { - throw new ParseException( - "only alias is supported in select-replace clause", ctx); + List expectAlias = Lists.newArrayList(); + NamedExpressionSeqContext namedExpressions = replaceContext.namedExpressionSeq(); + for (NamedExpressionContext namedExpressionContext : namedExpressions.namedExpression()) { + if 
(namedExpressionContext.identifierOrText() == null) { + throw new ParseException("only alias is supported in select-replace clause", ctx); + } + expectAlias.add((NamedExpression) namedExpressionContext.accept(this)); + } + if (expectAlias.isEmpty()) { + throw new ParseException("only alias is supported in select-replace clause", ctx); } finalReplacedAlias = expectAlias; } else { - throw new ParseException("Unsupported except or replace clause: " + exceptOrReplace.getText(), - ctx); + throw new ParseException( + "Unsupported except or replace clause: " + exceptOrReplace.getText(), ctx + ); } } return new UnboundStar(target, finalExpectSlots, finalReplacedAlias); @@ -2081,11 +2091,16 @@ public NamedExpression visitNamedExpression(NamedExpressionContext ctx) { if (ctx.identifierOrText() == null) { if (expression instanceof NamedExpression) { return (NamedExpression) expression; + } else if (expression instanceof Literal) { + return new Alias(expression); } else { return new UnboundAlias(expression); } } String alias = visitIdentifierOrText(ctx.identifierOrText()); + if (expression instanceof Literal) { + return new Alias(expression, alias); + } return new UnboundAlias(expression, alias); }); } @@ -3013,14 +3028,21 @@ public Expression visitParenthesizedExpression(ParenthesizedExpressionContext ct @Override public List visitRowConstructor(RowConstructorContext ctx) { - return ctx.rowConstructorItem().stream() - .map(this::visitRowConstructorItem) - .collect(ImmutableList.toImmutableList()); + List rowConstructorItemContexts = ctx.rowConstructorItem(); + ImmutableList.Builder columns + = ImmutableList.builderWithExpectedSize(rowConstructorItemContexts.size()); + for (RowConstructorItemContext rowConstructorItemContext : rowConstructorItemContexts) { + columns.add(visitRowConstructorItem(rowConstructorItemContext)); + } + return columns.build(); } @Override public NamedExpression visitRowConstructorItem(RowConstructorItemContext ctx) { - if (ctx.DEFAULT() != null) { + 
ConstantContext constant = ctx.constant(); + if (constant != null) { + return new Alias((Expression) constant.accept(this)); + } else if (ctx.DEFAULT() != null) { return new DefaultValueSlot(); } else { return visitNamedExpression(ctx.namedExpression()); @@ -3565,14 +3587,6 @@ private LogicalPlan withLimit(LogicalPlan input, Optional li }); } - private UnboundOneRowRelation withOneRowRelation(SelectColumnClauseContext selectCtx) { - return ParserUtils.withOrigin(selectCtx, () -> { - // fromClause does not exists. - List projects = getNamedExpressions(selectCtx.namedExpressionSeq()); - return new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), projects); - }); - } - /** * Add a regular (SELECT) query specification to a logical plan. The query specification * is the core of the logical plan, this is where sourcing (FROM clause), projection (SELECT), @@ -3830,7 +3844,7 @@ protected LogicalPlan withProjection(LogicalPlan input, SelectColumnClauseContex } } else { List projects = getNamedExpressions(selectCtx.namedExpressionSeq()); - if (input instanceof UnboundOneRowRelation) { + if (input instanceof OneRowRelation) { if (projects.stream().anyMatch(project -> project instanceof UnboundStar)) { throw new ParseException("SELECT * must have a FROM clause"); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java index 4ed71bbbc14673..c273f50b04ac44 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/NereidsParser.java @@ -56,6 +56,7 @@ import java.lang.reflect.Method; import java.util.BitSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; @@ -326,37 +327,40 @@ private T parse(String sql, Function parseFu private T parse(String sql, @Nullable LogicalPlanBuilder logicalPlanBuilder, Function 
parseFunction) { - ParserRuleContext tree = toAst(sql, parseFunction); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, parseFunction); LogicalPlanBuilder realLogicalPlanBuilder = logicalPlanBuilder == null - ? new LogicalPlanBuilder(getHintMap(sql, DorisParser::selectHint)) : logicalPlanBuilder; + ? new LogicalPlanBuilder(getHintMap(sql, tokenStream, DorisParser::selectHint)) + : logicalPlanBuilder; return (T) realLogicalPlanBuilder.visit(tree); } public LogicalPlan parseForCreateView(String sql) { - ParserRuleContext tree = toAst(sql, DorisParser::singleStatement); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, DorisParser::singleStatement); LogicalPlanBuilder realLogicalPlanBuilder = new LogicalPlanBuilderForCreateView( - getHintMap(sql, DorisParser::selectHint)); + getHintMap(sql, tokenStream, DorisParser::selectHint)); return (LogicalPlan) realLogicalPlanBuilder.visit(tree); } + /** parseForSyncMv */ public Optional parseForSyncMv(String sql) { - ParserRuleContext tree = toAst(sql, DorisParser::singleStatement); + CommonTokenStream tokenStream = parseAllTokens(sql); + ParserRuleContext tree = toAst(tokenStream, DorisParser::singleStatement); LogicalPlanBuilderForSyncMv logicalPlanBuilderForSyncMv = new LogicalPlanBuilderForSyncMv( - getHintMap(sql, DorisParser::selectHint)); + getHintMap(sql, tokenStream, DorisParser::selectHint)); logicalPlanBuilderForSyncMv.visit(tree); return logicalPlanBuilderForSyncMv.getQuerySql(); } /** get hint map */ - public static Map getHintMap(String sql, + public static Map getHintMap(String sql, CommonTokenStream hintTokenStream, Function parseFunction) { // parse hint first round - DorisLexer hintLexer = new DorisLexer(new CaseInsensitiveStream(CharStreams.fromString(sql))); - CommonTokenStream hintTokenStream = new CommonTokenStream(hintLexer); - Map selectHintMap = Maps.newHashMap(); - Token hintToken = 
hintTokenStream.getTokenSource().nextToken(); + Iterator tokenIterator = hintTokenStream.getTokens().iterator(); + Token hintToken = tokenIterator.hasNext() ? tokenIterator.next() : null; while (hintToken != null && hintToken.getType() != DorisLexer.EOF) { if (hintToken.getChannel() == 2 && sql.charAt(hintToken.getStartIndex() + 2) == '+') { String hintSql = sql.substring(hintToken.getStartIndex() + 3, hintToken.getStopIndex() + 1); @@ -366,15 +370,19 @@ public static Map getHintMap(String sql, ParserRuleContext hintContext = parseFunction.apply(hintParser); selectHintMap.put(hintToken.getStartIndex(), hintContext); } - hintToken = hintTokenStream.getTokenSource().nextToken(); + hintToken = tokenIterator.hasNext() ? tokenIterator.next() : null; } return selectHintMap; } + public static ParserRuleContext toAst( + String sql, Function parseFunction) { + return toAst(parseAllTokens(sql), parseFunction); + } + /** toAst */ - public static ParserRuleContext toAst(String sql, Function parseFunction) { - DorisLexer lexer = new DorisLexer(new CaseInsensitiveStream(CharStreams.fromString(sql))); - CommonTokenStream tokenStream = new CommonTokenStream(lexer); + public static ParserRuleContext toAst( + CommonTokenStream tokenStream, Function parseFunction) { DorisParser parser = new DorisParser(tokenStream); parser.addParseListener(POST_PROCESSOR); @@ -405,9 +413,7 @@ public static ParserRuleContext toAst(String sql, Function> aggregate() { default PatternDescriptor> aggregate(PatternDescriptor child) { return new PatternDescriptor(new TypePattern(Aggregate.class, child.pattern), defaultPromise()); } + + /** + * create a aggregate pattern. 
+ */ + default PatternDescriptor inlineTable() { + return new PatternDescriptor(new TypePattern(InlineTable.class), defaultPromise()); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java index 4cf3c75b68dc43..86d0495b851bd2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/RuleType.java @@ -62,6 +62,7 @@ public enum RuleType { BINDING_UNBOUND_TVF_RELATION_FUNCTION(RuleTypeClass.REWRITE), BINDING_SET_OPERATION_SLOT(RuleTypeClass.REWRITE), BINDING_INLINE_TABLE_SLOT(RuleTypeClass.REWRITE), + LOGICAL_INLINE_TABLE_TO_LOGICAL_UNION_OR_ONE_ROW_RELATION(RuleTypeClass.REWRITE), COUNT_LITERAL_REWRITE(RuleTypeClass.REWRITE), SUM_LITERAL_REWRITE(RuleTypeClass.REWRITE), @@ -496,8 +497,7 @@ public enum RuleType { IMPLEMENTATION_SENTINEL(RuleTypeClass.IMPLEMENTATION), // sentinel, use to count rules - SENTINEL(RuleTypeClass.SENTINEL), - ; + SENTINEL(RuleTypeClass.SENTINEL); private final RuleTypeClass ruleTypeClass; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java index 1e481542baec12..c308a1e7e796e8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindExpression.java @@ -66,6 +66,7 @@ import org.apache.doris.nereids.trees.plans.JoinType; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.algebra.Aggregate; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.algebra.SetOperation; import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate; @@ 
-74,7 +75,6 @@ import org.apache.doris.nereids.trees.plans.logical.LogicalFilter; import org.apache.doris.nereids.trees.plans.logical.LogicalGenerate; import org.apache.doris.nereids.trees.plans.logical.LogicalHaving; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalIntersect; import org.apache.doris.nereids.trees.plans.logical.LogicalJoin; import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; @@ -195,7 +195,7 @@ protected boolean condition(Rule rule, Plan plan) { logicalQualify(logicalHaving()).thenApply(this::bindQualifyHaving) ), RuleType.BINDING_INLINE_TABLE_SLOT.build( - logicalInlineTable().thenApply(this::bindInlineTable) + inlineTable().thenApply(this::bindInlineTable) ), RuleType.BINDING_ONE_ROW_RELATION_SLOT.build( // we should bind UnboundAlias in the UnboundOneRowRelation @@ -349,24 +349,24 @@ private LogicalOneRowRelation bindOneRowRelation(MatchingContext ctx) { - LogicalInlineTable logicalInlineTable = ctx.root; + private LogicalPlan bindInlineTable(MatchingContext ctx) { + InlineTable inlineTable = ctx.root; // ensure all expressions are valid. 
+ List> constantExprsList = inlineTable.getConstantExprsList(); List relations - = Lists.newArrayListWithCapacity(logicalInlineTable.getConstantExprsList().size()); - for (int i = 0; i < logicalInlineTable.getConstantExprsList().size(); i++) { - for (NamedExpression constantExpr : logicalInlineTable.getConstantExprsList().get(i)) { + = Lists.newArrayListWithCapacity(constantExprsList.size()); + for (int i = 0; i < constantExprsList.size(); i++) { + List row = constantExprsList.get(i); + for (NamedExpression constantExpr : row) { if (constantExpr instanceof DefaultValueSlot) { throw new AnalysisException("Default expression" + " can't exist in SELECT statement at row " + (i + 1)); } } - relations.add(new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), - logicalInlineTable.getConstantExprsList().get(i))); + relations.add(new UnboundOneRowRelation(StatementScopeIdGenerator.newRelationId(), row)); } // construct union all tree - return LogicalPlanBuilder.reduceToLogicalPlanTree(0, relations.size() - 1, - relations, Qualifier.ALL); + return LogicalPlanBuilder.reduceToLogicalPlanTree(0, relations.size() - 1, relations, Qualifier.ALL); } private LogicalHaving bindHaving(MatchingContext> ctx) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java index 9b4ff631a838b6..f08148d73e7fb6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/BindSink.java @@ -80,6 +80,7 @@ import org.apache.doris.nereids.util.ExpressionUtils; import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.nereids.util.TypeCoercionUtils; +import org.apache.doris.nereids.util.Utils; import org.apache.doris.qe.ConnectContext; import com.google.common.base.Preconditions; @@ -253,7 +254,7 @@ private Plan bindOlapTableSink(MatchingContext> ctx) { 
private LogicalProject getOutputProjectByCoercion(List tableSchema, LogicalPlan child, Map columnToOutput) { - List fullOutputExprs = ImmutableList.copyOf(columnToOutput.values()); + List fullOutputExprs = Utils.fastToImmutableList(columnToOutput.values()); if (child instanceof LogicalOneRowRelation) { // remove default value slot in one row relation child = ((LogicalOneRowRelation) child).withProjects(((LogicalOneRowRelation) child) @@ -274,6 +275,7 @@ private LogicalProject getOutputProjectByCoercion(List tableSchema, L // we skip it. continue; } + expr = expr.toSlot(); DataType inputType = expr.getDataType(); DataType targetType = DataType.fromCatalogType(tableSchema.get(i).getType()); Expression castExpr = expr; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java index e5b74ee26bcb02..0fcc58e0273d2f 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/ExpressionRewrite.java @@ -43,6 +43,7 @@ import org.apache.doris.nereids.util.ExpressionUtils; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableSet; import java.util.Collection; @@ -85,7 +86,8 @@ public List buildRules() { new OlapTableSinkExpressionRewrite().build()); } - private class GenerateExpressionRewrite extends OneRewriteRuleFactory { + /** GenerateExpressionRewrite */ + public class GenerateExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalGenerate().thenApply(ctx -> { @@ -103,7 +105,8 @@ public Rule build() { } } - private class OneRowRelationExpressionRewrite extends OneRewriteRuleFactory { + /** OneRowRelationExpressionRewrite */ + public class OneRowRelationExpressionRewrite extends 
OneRewriteRuleFactory { @Override public Rule build() { return logicalOneRowRelation().thenApply(ctx -> { @@ -111,19 +114,25 @@ public Rule build() { List projects = oneRowRelation.getProjects(); ExpressionRewriteContext context = new ExpressionRewriteContext(ctx.cascadesContext); - List newProjects = projects - .stream() - .map(expr -> (NamedExpression) rewriter.rewrite(expr, context)) - .collect(ImmutableList.toImmutableList()); - if (projects.equals(newProjects)) { - return oneRowRelation; + Builder rewrittenExprs + = ImmutableList.builderWithExpectedSize(projects.size()); + boolean changed = false; + for (NamedExpression project : projects) { + NamedExpression newProject = (NamedExpression) rewriter.rewrite(project, context); + if (!changed && !project.deepEquals(newProject)) { + changed = true; + } + rewrittenExprs.add(newProject); } - return new LogicalOneRowRelation(oneRowRelation.getRelationId(), newProjects); + return changed + ? new LogicalOneRowRelation(oneRowRelation.getRelationId(), rewrittenExprs.build()) + : oneRowRelation; }).toRule(RuleType.REWRITE_ONE_ROW_RELATION_EXPRESSION); } } - private class ProjectExpressionRewrite extends OneRewriteRuleFactory { + /** ProjectExpressionRewrite */ + public class ProjectExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalProject().thenApply(ctx -> { @@ -139,7 +148,8 @@ public Rule build() { } } - private class FilterExpressionRewrite extends OneRewriteRuleFactory { + /** FilterExpressionRewrite */ + public class FilterExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalFilter().thenApply(ctx -> { @@ -155,7 +165,8 @@ public Rule build() { } } - private class OlapTableSinkExpressionRewrite extends OneRewriteRuleFactory { + /** OlapTableSinkExpressionRewrite */ + public class OlapTableSinkExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalOlapTableSink().thenApply(ctx -> { @@ -177,7 
+188,8 @@ public Rule build() { } } - private class AggExpressionRewrite extends OneRewriteRuleFactory { + /** AggExpressionRewrite */ + public class AggExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalAggregate().thenApply(ctx -> { @@ -197,7 +209,8 @@ public Rule build() { } } - private class JoinExpressionRewrite extends OneRewriteRuleFactory { + /** JoinExpressionRewrite */ + public class JoinExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalJoin().thenApply(ctx -> { @@ -244,7 +257,8 @@ private Pair> rewriteConjuncts(List conjun } } - private class SortExpressionRewrite extends OneRewriteRuleFactory { + /** SortExpressionRewrite */ + public class SortExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { @@ -265,7 +279,8 @@ public Rule build() { } } - private class HavingExpressionRewrite extends OneRewriteRuleFactory { + /** HavingExpressionRewrite */ + public class HavingExpressionRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalHaving().thenApply(ctx -> { @@ -281,7 +296,8 @@ public Rule build() { } } - private class LogicalRepeatRewrite extends OneRewriteRuleFactory { + /** LogicalRepeatRewrite */ + public class LogicalRepeatRewrite extends OneRewriteRuleFactory { @Override public Rule build() { return logicalRepeat().thenApply(ctx -> { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java index 6aa4529ddd4ab6..6d5a70139ab19c 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/ConvertAggStateCast.java @@ -47,7 +47,8 @@ public List> buildRules() { ); } - private static Expression convert(Cast cast) { + /** convert */ 
+ public static Expression convert(Cast cast) { Expression child = cast.child(); DataType originalType = child.getDataType(); DataType targetType = cast.getDataType(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java index d1c385ec621062..c439458ff4c96e 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/expression/rules/FoldConstantRuleOnFE.java @@ -24,7 +24,9 @@ import org.apache.doris.common.util.DebugUtil; import org.apache.doris.datasource.InternalCatalog; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.rules.analysis.ExpressionAnalyzer; import org.apache.doris.nereids.rules.expression.AbstractExpressionRewriteRule; import org.apache.doris.nereids.rules.expression.ExpressionListenerMatcher; import org.apache.doris.nereids.rules.expression.ExpressionMatchingContext; @@ -53,6 +55,7 @@ import org.apache.doris.nereids.trees.expressions.Or; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.expressions.TimestampArithmetic; +import org.apache.doris.nereids.trees.expressions.Variable; import org.apache.doris.nereids.trees.expressions.WhenClause; import org.apache.doris.nereids.trees.expressions.functions.BoundFunction; import org.apache.doris.nereids.trees.expressions.functions.PropagateNullLiteral; @@ -220,6 +223,12 @@ public Expression visitMatch(Match match, ExpressionRewriteContext context) { return super.visitMatch(match, context); } + @Override + public Expression visitUnboundVariable(UnboundVariable unboundVariable, ExpressionRewriteContext context) { + Variable variable = 
ExpressionAnalyzer.resolveUnboundVariable(unboundVariable); + return variable.getRealExpression(); + } + @Override public Expression visitEncryptKeyRef(EncryptKeyRef encryptKeyRef, ExpressionRewriteContext context) { String dbName = encryptKeyRef.getDbName(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java index 7d1b5439bace23..825bb6f7180d16 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/stats/ExpressionEstimation.java @@ -101,6 +101,7 @@ import com.google.common.base.Preconditions; import org.apache.commons.collections.CollectionUtils; +import java.time.DateTimeException; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; @@ -212,7 +213,7 @@ private ColumnStatistic castMinMax(ColumnStatistic colStats, DataType targetType long min = dateMinLiteral.getValue(); builder.setMinValue(min); builder.setMinExpr(dateMinLiteral.toLegacyLiteral()); - } catch (AnalysisException e) { + } catch (AnalysisException | DateTimeException e) { convertSuccess = false; } } @@ -223,7 +224,7 @@ private ColumnStatistic castMinMax(ColumnStatistic colStats, DataType targetType long max = dateMaxLiteral.getValue(); builder.setMaxValue(max); builder.setMaxExpr(dateMaxLiteral.toLegacyLiteral()); - } catch (AnalysisException e) { + } catch (AnalysisException | DateTimeException e) { convertSuccess = false; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java index ed99e3025e8603..eb8269d68fd0a8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteral.java @@ -30,6 +30,7 @@ import com.google.common.collect.ImmutableSet; +import java.time.DateTimeException; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.Year; @@ -269,8 +270,8 @@ static Result normalize(String s) { } /** parseDateLiteral */ - public static Result parseDateLiteral(String s) { - Result parseResult = parseDateTime(s); + public static Result parseDateLiteral(String s) { + Result parseResult = parseDateTime(s); if (parseResult.isError()) { return parseResult.cast(); } @@ -286,17 +287,24 @@ public static Result parseDateLiteral(String s) } /** parseDateTime */ - public static Result parseDateTime(String s) { - // fast parse '2022-01-01' - if (s.length() == 10 && s.charAt(4) == '-' && s.charAt(7) == '-') { - TemporalAccessor date = fastParseDate(s); - if (date != null) { - return Result.ok(date); - } - } - + public static Result parseDateTime(String s) { String originalString = s; try { + // fast parse '2022-01-01' + if ((s.length() == 10 || s.length() == 19) && s.charAt(4) == '-' && s.charAt(7) == '-') { + if (s.length() == 10) { + TemporalAccessor date = fastParseDate(s); + if (date != null) { + return Result.ok(date); + } + } else if (s.charAt(10) == ' ' && s.charAt(13) == ':' && s.charAt(16) == ':') { + TemporalAccessor date = fastParseDateTime(s); + if (date != null) { + return Result.ok(date); + } + } + } + TemporalAccessor dateTime; // remove suffix/prefix ' ' @@ -342,6 +350,10 @@ public static Result parseDateTime(String s } return Result.ok(dateTime); + } catch (DateTimeException e) { + return Result.err(() -> + new DateTimeException("date/datetime literal [" + originalString + "] is invalid", e) + ); } catch (Exception ex) { return Result.err(() -> new AnalysisException("date/datetime literal [" + originalString + "] is invalid")); } @@ -566,6 +578,21 @@ private static TemporalAccessor fastParseDate(String date) { } } + 
private static TemporalAccessor fastParseDateTime(String date) { + Integer year = readNextInt(date, 0, 4); + Integer month = readNextInt(date, 5, 2); + Integer day = readNextInt(date, 8, 2); + Integer hour = readNextInt(date, 11, 2); + Integer minute = readNextInt(date, 14, 2); + Integer second = readNextInt(date, 17, 2); + + if (year != null && month != null && day != null && hour != null && minute != null && second != null) { + return LocalDateTime.of(year, month, day, hour, minute, second); + } else { + return null; + } + } + private static Integer readNextInt(String str, int offset, int readLength) { int value = 0; int realReadLength = 0; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java index 169ed421934824..7912142f97feb9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/expressions/literal/DateTimeLiteral.java @@ -132,7 +132,7 @@ public static int determineScale(String s) { /** parseDateTimeLiteral */ public static Result parseDateTimeLiteral(String s, boolean isV2) { - Result parseResult = parseDateTime(s); + Result parseResult = parseDateTime(s); if (parseResult.isError()) { return parseResult.cast(); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java index 46771392e59cd9..77eef860b98a77 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/Explainable.java @@ -17,11 +17,20 @@ package org.apache.doris.nereids.trees.plans; +import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; +import 
org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.qe.ConnectContext; +import java.util.Optional; + /** * plan can be explained. */ public interface Explainable { Plan getExplainPlan(ConnectContext ctx) throws Exception; + + default Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) throws Exception { + return Optional.empty(); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 407610fbe08add..2860ec10092312 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -43,6 +43,7 @@ public enum PlanType { LOGICAL_UNBOUND_ONE_ROW_RELATION, LOGICAL_UNBOUND_RELATION, LOGICAL_UNBOUND_TVF_RELATION, + LOGICAL_UNBOUND_INLINE_TABLE, // logical sinks LOGICAL_FILE_SINK, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java new file mode 100644 index 00000000000000..0aded14ca77119 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/algebra/InlineTable.java @@ -0,0 +1,28 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.algebra; + +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.plans.LeafPlan; + +import java.util.List; + +/** InlineTable */ +public interface InlineTable extends LeafPlan { + List> getConstantExprsList(); +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java index e3f2f1d732ae5a..ea805f6cb0ceb6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExplainCommand.java @@ -79,12 +79,16 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { if (!(logicalPlan instanceof Explainable)) { throw new AnalysisException(logicalPlan.getClass().getSimpleName() + " cannot be explained"); } - explainPlan = ((LogicalPlan) ((Explainable) logicalPlan).getExplainPlan(ctx)); + Explainable explainable = (Explainable) logicalPlan; + explainPlan = ((LogicalPlan) explainable.getExplainPlan(ctx)); + NereidsPlanner planner = explainable.getExplainPlanner(explainPlan, ctx.getStatementContext()).orElseGet(() -> + new NereidsPlanner(ctx.getStatementContext()) + ); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(explainPlan, ctx.getStatementContext()); ExplainOptions explainOptions = new ExplainOptions(level, showPlanProcess); 
logicalPlanAdapter.setIsExplain(explainOptions); executor.setParsedStmt(logicalPlanAdapter); - NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); if (ctx.getSessionVariable().isEnableMaterializedViewRewrite()) { ctx.getStatementContext().addPlannerHook(InitMaterializationContextHook.INSTANCE); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java index b4a7a9eee3a148..4fb42a21fd780d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/BatchInsertIntoTableCommand.java @@ -26,19 +26,22 @@ import org.apache.doris.common.ErrorCode; import org.apache.doris.common.ErrorReport; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; +import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.TreeNode; import org.apache.doris.nereids.trees.expressions.ExprId; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.commands.Command; import org.apache.doris.nereids.trees.plans.commands.NoForward; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import 
org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.physical.PhysicalOlapTableSink; import org.apache.doris.nereids.trees.plans.physical.PhysicalOneRowRelation; @@ -69,16 +72,34 @@ public class BatchInsertIntoTableCommand extends Command implements NoForward, E public static final Logger LOG = LogManager.getLogger(BatchInsertIntoTableCommand.class); - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; public BatchInsertIntoTableCommand(LogicalPlan logicalQuery) { super(PlanType.BATCH_INSERT_INTO_TABLE_COMMAND); - this.logicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.logicalQuery = Optional.empty(); + } + + public LogicalPlan getLogicalQuery() { + return logicalQuery.orElse(originLogicalQuery); } @Override public Plan getExplainPlan(ConnectContext ctx) throws Exception { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) throws Exception { + ConnectContext connectContext = ctx.getConnectContext(); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, connectContext); + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, connectContext); + return Optional.of(new FastInsertIntoValuesPlanner(ctx, supportFastInsertIntoValues)); } @Override @@ -88,19 +109,32 @@ public R accept(PlanVisitor visitor, C context) { @Override public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { - 
UnboundTableSink unboundTableSink = (UnboundTableSink) logicalQuery; + UnboundTableSink unboundTableSink = (UnboundTableSink) originLogicalQuery; Plan query = unboundTableSink.child(); - if (!(query instanceof LogicalInlineTable)) { + if (!(query instanceof InlineTable)) { throw new AnalysisException("Insert into ** select is not supported in a transaction"); } PhysicalOlapTableSink sink; - TableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, ctx); targetTableIf.readLock(); try { - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf, Optional.empty()); - LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); - NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); + StatementContext statementContext = ctx.getStatementContext(); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(statementContext, originLogicalQuery, PhysicalProperties.ANY) + ); + + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, Optional.empty() + )); + + LogicalPlan logicalQuery = this.logicalQuery.get(); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, statementContext); + + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalQuery, targetTableIf, ctx); + FastInsertIntoValuesPlanner planner = new FastInsertIntoValuesPlanner( + statementContext, supportFastInsertIntoValues, true); planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift()); executor.checkBlockRules(); if (ctx.getConnectType() == ConnectType.MYSQL && ctx.getMysqlChannel() != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java new file mode 100644 index 00000000000000..18dcbf25d28aa6 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/FastInsertIntoValuesPlanner.java @@ -0,0 +1,166 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands.insert; + +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; +import org.apache.doris.nereids.memo.Group; +import org.apache.doris.nereids.memo.GroupId; +import org.apache.doris.nereids.properties.PhysicalProperties; +import org.apache.doris.nereids.rules.Rule; +import org.apache.doris.nereids.rules.implementation.LogicalOlapTableSinkToPhysicalOlapTableSink; +import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.logical.LogicalOlapTableSink; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.logical.LogicalProject; +import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; +import org.apache.doris.nereids.trees.plans.physical.PhysicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.physical.PhysicalPlan; +import org.apache.doris.nereids.trees.plans.physical.PhysicalProject; +import org.apache.doris.nereids.trees.plans.physical.PhysicalUnion; +import org.apache.doris.nereids.trees.plans.visitor.DefaultPlanRewriter; + +import java.util.concurrent.atomic.AtomicReference; + +/** FastInsertIntoValuesPlanner */ +public class FastInsertIntoValuesPlanner extends NereidsPlanner { + private static final Rule toPhysicalOlapTableSink = new LogicalOlapTableSinkToPhysicalOlapTableSink() + .build(); + protected final boolean fastInsertIntoValues; + protected final boolean batchInsert; + private final AtomicReference rootGroupRef = new AtomicReference<>(); + + public FastInsertIntoValuesPlanner(StatementContext statementContext, boolean fastInsertIntoValues) { + this(statementContext, fastInsertIntoValues, false); + } + + public FastInsertIntoValuesPlanner( + StatementContext statementContext, boolean fastInsertIntoValues, boolean batchInsert) { + super(statementContext); + this.fastInsertIntoValues 
= fastInsertIntoValues; + this.batchInsert = batchInsert; + } + + @Override + protected void analyze(boolean showPlanProcess) { + if (!fastInsertIntoValues) { + super.analyze(showPlanProcess); + return; + } + CascadesContext cascadesContext = getCascadesContext(); + keepOrShowPlanProcess(showPlanProcess, () -> { + InsertIntoValuesAnalyzer analyzer = new InsertIntoValuesAnalyzer(cascadesContext, batchInsert); + analyzer.execute(); + }); + } + + @Override + protected void rewrite(boolean showPlanProcess) { + if (!fastInsertIntoValues) { + super.rewrite(showPlanProcess); + } + } + + @Override + protected void optimize() { + if (!fastInsertIntoValues) { + super.optimize(); + return; + } + + DefaultPlanRewriter optimizer = new DefaultPlanRewriter() { + @Override + public Plan visitLogicalUnion(LogicalUnion logicalUnion, Void context) { + logicalUnion = (LogicalUnion) super.visitLogicalUnion(logicalUnion, context); + + return new PhysicalUnion(logicalUnion.getQualifier(), + logicalUnion.getOutputs(), + logicalUnion.getRegularChildrenOutputs(), + logicalUnion.getConstantExprsList(), + logicalUnion.getLogicalProperties(), + logicalUnion.children() + ); + } + + @Override + public Plan visitLogicalOneRowRelation(LogicalOneRowRelation oneRowRelation, Void context) { + return new PhysicalOneRowRelation( + oneRowRelation.getRelationId(), + oneRowRelation.getProjects(), + oneRowRelation.getLogicalProperties()); + } + + @Override + public Plan visitLogicalProject(LogicalProject logicalProject, Void context) { + logicalProject = + (LogicalProject) super.visitLogicalProject(logicalProject, context); + + return new PhysicalProject<>( + logicalProject.getProjects(), + logicalProject.getLogicalProperties(), + logicalProject.child() + ); + } + + @Override + public Plan visitLogicalOlapTableSink(LogicalOlapTableSink olapTableSink, + Void context) { + olapTableSink = + (LogicalOlapTableSink) super.visitLogicalOlapTableSink(olapTableSink, context); + return toPhysicalOlapTableSink + 
.transform(olapTableSink, getCascadesContext()) + .get(0); + } + }; + + PhysicalPlan physicalPlan = + (PhysicalPlan) getCascadesContext().getRewritePlan().accept(optimizer, null); + + super.physicalPlan = physicalPlan; + + GroupId rootGroupId = GroupId.createGenerator().getNextId(); + Group rootGroup = new Group(rootGroupId, physicalPlan.getLogicalProperties()); + rootGroupRef.set(rootGroup); + } + + @Override + public Group getRoot() { + if (!fastInsertIntoValues) { + return super.getRoot(); + } + return rootGroupRef.get(); + } + + @Override + protected PhysicalPlan chooseNthPlan( + Group rootGroup, PhysicalProperties physicalProperties, int nthPlan) { + if (!fastInsertIntoValues) { + return super.chooseNthPlan(rootGroup, physicalProperties, nthPlan); + } + return super.physicalPlan; + } + + @Override + protected PhysicalPlan postProcess(PhysicalPlan physicalPlan) { + if (!fastInsertIntoValues) { + return super.postProcess(physicalPlan); + } + return physicalPlan; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java index 10f9947974cdb0..96d5d56a7e10ff 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoTableCommand.java @@ -31,11 +31,13 @@ import org.apache.doris.datasource.jdbc.JdbcExternalTable; import org.apache.doris.load.loadv2.LoadStatistic; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; 
+import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.expressions.Slot; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; @@ -85,8 +87,8 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync, public static final Logger LOG = LogManager.getLogger(InsertIntoTableCommand.class); - private LogicalPlan originalLogicalQuery; - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; private Optional labelName; /** * When source it's from job scheduler,it will be set. @@ -101,15 +103,15 @@ public class InsertIntoTableCommand extends Command implements ForwardWithSync, public InsertIntoTableCommand(LogicalPlan logicalQuery, Optional labelName, Optional insertCtx, Optional cte) { super(PlanType.INSERT_INTO_TABLE_COMMAND); - this.originalLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); - this.logicalQuery = originalLogicalQuery; + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); this.labelName = Objects.requireNonNull(labelName, "labelName should not be null"); + this.logicalQuery = Optional.empty(); this.insertCtx = insertCtx; this.cte = cte; } public LogicalPlan getLogicalQuery() { - return logicalQuery; + return logicalQuery.orElse(originLogicalQuery); } public Optional getLabelName() { @@ -149,7 +151,7 @@ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor */ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor stmtExecutor, boolean needBeginTransaction) throws Exception { - List qualifiedTargetTableName = InsertUtils.getTargetTableQualified(logicalQuery, ctx); + List qualifiedTargetTableName = InsertUtils.getTargetTableQualified(originLogicalQuery, ctx); AbstractInsertExecutor insertExecutor; int retryTimes = 0; @@ -214,8 +216,6 @@ public 
AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor stmtExec // so we need to set this here insertExecutor.getCoordinator().setTxnId(insertExecutor.getTxnId()); stmtExecutor.setCoord(insertExecutor.getCoordinator()); - // for prepare and execute, avoiding normalization for every execute command - this.originalLogicalQuery = this.logicalQuery; return insertExecutor; } LOG.warn("insert plan failed {} times. query id is {}.", retryTimes, DebugUtil.printId(ctx.queryId())); @@ -226,17 +226,23 @@ private BuildInsertExecutorResult initPlanOnce(ConnectContext ctx, StmtExecutor stmtExecutor, TableIf targetTableIf) throws Throwable { targetTableIf.readLock(); try { + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); // process inline table (default values, empty values) - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(originalLogicalQuery, targetTableIf, insertCtx); + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, insertCtx + )); if (cte.isPresent()) { - this.logicalQuery = ((LogicalPlan) cte.get().withChildren(logicalQuery)); + this.logicalQuery = Optional.of((LogicalPlan) cte.get().withChildren(logicalQuery.get())); } - OlapGroupCommitInsertExecutor.analyzeGroupCommit(ctx, targetTableIf, this.logicalQuery, this.insertCtx); + OlapGroupCommitInsertExecutor.analyzeGroupCommit( + ctx, targetTableIf, this.logicalQuery.get(), this.insertCtx); } finally { targetTableIf.readUnlock(); } - LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); + LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery.get(), ctx.getStatementContext()); return planInsertExecutor(ctx, stmtExecutor, logicalPlanAdapter, targetTableIf); } @@ -362,6 +368,9 @@ private ExecutorFactory selectInsertExecutorFactory( private 
BuildInsertExecutorResult planInsertExecutor( ConnectContext ctx, StmtExecutor stmtExecutor, LogicalPlanAdapter logicalPlanAdapter, TableIf targetTableIf) throws Throwable { + LogicalPlan logicalPlan = logicalPlanAdapter.getLogicalPlan(); + + boolean supportFastInsertIntoValues = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, ctx); // the key logical when use new coordinator: // 1. use NereidsPlanner to generate PhysicalPlan // 2. use PhysicalPlan to select InsertExecutorFactory, some InsertExecutors want to control @@ -372,10 +381,9 @@ private BuildInsertExecutorResult planInsertExecutor( // 3. NereidsPlanner use PhysicalPlan and the provided backend to generate DistributePlan // 4. ExecutorFactory use the DistributePlan to generate the NereidsSqlCoordinator and InsertExecutor - StatementContext statementContext = ctx.getStatementContext(); - AtomicReference executorFactoryRef = new AtomicReference<>(); - NereidsPlanner planner = new NereidsPlanner(statementContext) { + FastInsertIntoValuesPlanner planner = new FastInsertIntoValuesPlanner( + ctx.getStatementContext(), supportFastInsertIntoValues) { @Override protected void doDistribute(boolean canUseNereidsDistributePlanner) { // when enter this method, the step 1 already executed @@ -406,12 +414,24 @@ private void runInternal(ConnectContext ctx, StmtExecutor executor) throws Excep } public boolean isExternalTableSink() { - return !(logicalQuery instanceof UnboundTableSink); + return !(getLogicalQuery() instanceof UnboundTableSink); } @Override public Plan getExplainPlan(ConnectContext ctx) { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) { + 
ConnectContext connectContext = ctx.getConnectContext(); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, connectContext); + boolean supportFastInsertIntoValues + = InsertUtils.supportFastInsertIntoValues(logicalPlan, targetTableIf, connectContext); + return Optional.of(new FastInsertIntoValuesPlanner(ctx, supportFastInsertIntoValues)); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java new file mode 100644 index 00000000000000..1c630a41c846fe --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertIntoValuesAnalyzer.java @@ -0,0 +1,156 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.nereids.trees.plans.commands.insert; + +import org.apache.doris.common.Pair; +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.jobs.executor.AbstractBatchJobExecutor; +import org.apache.doris.nereids.jobs.rewrite.RewriteJob; +import org.apache.doris.nereids.rules.Rule; +import org.apache.doris.nereids.rules.RuleType; +import org.apache.doris.nereids.rules.analysis.BindSink; +import org.apache.doris.nereids.rules.expression.ExpressionRewrite; +import org.apache.doris.nereids.rules.expression.ExpressionRewriteRule; +import org.apache.doris.nereids.rules.expression.rules.ConvertAggStateCast; +import org.apache.doris.nereids.rules.expression.rules.FoldConstantRuleOnFE; +import org.apache.doris.nereids.rules.rewrite.MergeProjects; +import org.apache.doris.nereids.rules.rewrite.OneRewriteRuleFactory; +import org.apache.doris.nereids.rules.rewrite.PushProjectIntoOneRowRelation; +import org.apache.doris.nereids.rules.rewrite.PushProjectIntoUnion; +import org.apache.doris.nereids.trees.expressions.NamedExpression; +import org.apache.doris.nereids.trees.expressions.SlotReference; +import org.apache.doris.nereids.trees.expressions.StatementScopeIdGenerator; +import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; +import org.apache.doris.nereids.trees.plans.logical.LogicalOneRowRelation; +import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; +import org.apache.doris.nereids.types.DataType; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; + +/** InsertIntoValuesAnalyzer */ +public class InsertIntoValuesAnalyzer extends AbstractBatchJobExecutor { + public static final List INSERT_JOBS = jobs( + bottomUp( + new InlineTableToUnionOrOneRowRelation(), + new BindSink(), + new MergeProjects(), + // after bind olap table sink, the LogicalProject will be generated 
under LogicalOlapTableSink, + // we should convert the agg state function in the project, and evaluate some env parameters + // like encrypt key reference, for example: `values (aes_encrypt("abc",key test.my_key))`, + // we should replace the `test.my_key` to real key + new RewriteInsertIntoExpressions(ExpressionRewrite.bottomUp( + ConvertAggStateCast.INSTANCE, + FoldConstantRuleOnFE.PATTERN_MATCH_INSTANCE + )) + ) + ); + + public static final List BATCH_INSERT_JOBS = jobs( + bottomUp( + new InlineTableToUnionOrOneRowRelation(), + new BindSink(), + new MergeProjects(), + + // the BatchInsertIntoTableCommand need send StringLiteral to backend, + // and only support alias(literal as xx) or alias(cast(literal as xx)), + // but not support alias(cast(slotRef as xx)) which create in BindSink, + // we should push down the cast into Union or OneRowRelation. + // the InsertIntoTableCommand support translate slotRef in the TPlan, + // so we don't need this rules, just evaluate in backend + new PushProjectIntoUnion(), + new PushProjectIntoOneRowRelation(), + + new RewriteBatchInsertIntoExpressions(ExpressionRewrite.bottomUp( + ConvertAggStateCast.INSTANCE, + FoldConstantRuleOnFE.PATTERN_MATCH_INSTANCE + )) + ) + ); + + private final boolean batchInsert; + + public InsertIntoValuesAnalyzer(CascadesContext cascadesContext, boolean batchInsert) { + super(cascadesContext); + this.batchInsert = batchInsert; + } + + @Override + public List getJobs() { + return batchInsert ? BATCH_INSERT_JOBS : INSERT_JOBS; + } + + // we only rewrite the project's expression + private static class RewriteInsertIntoExpressions extends ExpressionRewrite { + public RewriteInsertIntoExpressions(ExpressionRewriteRule... 
rules) { + super(rules); + } + + @Override + public List buildRules() { + return ImmutableList.of( + new ProjectExpressionRewrite().build() + ); + } + } + + // we only rewrite the project's and one row relation expression + private static class RewriteBatchInsertIntoExpressions extends ExpressionRewrite { + public RewriteBatchInsertIntoExpressions(ExpressionRewriteRule... rules) { + super(rules); + } + + @Override + public List buildRules() { + return ImmutableList.of( + new ProjectExpressionRewrite().build(), + new OneRowRelationExpressionRewrite().build() + ); + } + } + + private static class InlineTableToUnionOrOneRowRelation extends OneRewriteRuleFactory { + @Override + public Rule build() { + return inlineTable().then(inlineTable -> { + List> originConstants = inlineTable.getConstantExprsList(); + if (originConstants.size() > 1) { + Pair>, List> castedConstantsAndNullables + = LogicalUnion.castCommonDataTypeAndNullableByConstants(originConstants); + List> castedRows = castedConstantsAndNullables.key(); + List nullables = castedConstantsAndNullables.value(); + List outputs = Lists.newArrayList(); + List firstRow = originConstants.get(0); + for (int columnId = 0; columnId < firstRow.size(); columnId++) { + String name = firstRow.get(columnId).getName(); + DataType commonDataType = castedRows.get(0).get(columnId).getDataType(); + outputs.add(new SlotReference(name, commonDataType, nullables.get(columnId))); + } + return new LogicalUnion(Qualifier.ALL, castedRows, ImmutableList.of()).withNewOutputs(outputs); + } else if (originConstants.size() == 1) { + return new LogicalOneRowRelation(StatementScopeIdGenerator.newRelationId(), originConstants.get(0)); + } else { + throw new AnalysisException("Illegal inline table with empty constants"); + } + }).toRule(RuleType.LOGICAL_INLINE_TABLE_TO_LOGICAL_UNION_OR_ONE_ROW_RELATION); + } + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java index c89a4fc7be96ee..68c71de2d9e8b8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertOverwriteTableCommand.java @@ -32,13 +32,16 @@ import org.apache.doris.insertoverwrite.InsertOverwriteUtil; import org.apache.doris.mtmv.MTMVUtil; import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.CascadesContext; import org.apache.doris.nereids.NereidsPlanner; +import org.apache.doris.nereids.StatementContext; import org.apache.doris.nereids.analyzer.UnboundHiveTableSink; import org.apache.doris.nereids.analyzer.UnboundIcebergTableSink; import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.analyzer.UnboundTableSinkCreator; import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.glue.LogicalPlanAdapter; +import org.apache.doris.nereids.properties.PhysicalProperties; import org.apache.doris.nereids.trees.TreeNode; import org.apache.doris.nereids.trees.plans.Explainable; import org.apache.doris.nereids.trees.plans.Plan; @@ -82,7 +85,8 @@ public class InsertOverwriteTableCommand extends Command implements ForwardWithS private static final Logger LOG = LogManager.getLogger(InsertOverwriteTableCommand.class); - private LogicalPlan logicalQuery; + private LogicalPlan originLogicalQuery; + private Optional logicalQuery; private Optional labelName; private final Optional cte; private AtomicBoolean isCancelled = new AtomicBoolean(false); @@ -94,7 +98,8 @@ public class InsertOverwriteTableCommand extends Command implements ForwardWithS public InsertOverwriteTableCommand(LogicalPlan logicalQuery, Optional labelName, Optional cte) { super(PlanType.INSERT_INTO_TABLE_COMMAND); - this.logicalQuery = 
Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.originLogicalQuery = Objects.requireNonNull(logicalQuery, "logicalQuery should not be null"); + this.logicalQuery = Optional.empty(); this.labelName = Objects.requireNonNull(labelName, "labelName should not be null"); this.cte = cte; } @@ -103,14 +108,18 @@ public void setLabelName(Optional labelName) { this.labelName = labelName; } - public boolean isAutoDetectOverwrite() { + public boolean isAutoDetectOverwrite(LogicalPlan logicalQuery) { return (logicalQuery instanceof UnboundTableSink) - && ((UnboundTableSink) this.logicalQuery).isAutoDetectPartition(); + && ((UnboundTableSink) logicalQuery).isAutoDetectPartition(); + } + + public LogicalPlan getLogicalQuery() { + return logicalQuery.orElse(originLogicalQuery); } @Override public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { - TableIf targetTableIf = InsertUtils.getTargetTable(logicalQuery, ctx); + TableIf targetTableIf = InsertUtils.getTargetTable(originLogicalQuery, ctx); //check allow insert overwrite if (!allowInsertOverwrite(targetTableIf)) { String errMsg = "insert into overwrite only support OLAP and HMS/ICEBERG table." 
@@ -122,12 +131,20 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { if (targetTableIf instanceof MTMV && !MTMVUtil.allowModifyMTMVData(ctx)) { throw new AnalysisException("Not allowed to perform current operation on async materialized view"); } - this.logicalQuery = (LogicalPlan) InsertUtils.normalizePlan(logicalQuery, targetTableIf, Optional.empty()); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + this.logicalQuery = Optional.of((LogicalPlan) InsertUtils.normalizePlan( + originLogicalQuery, targetTableIf, analyzeContext, Optional.empty())); if (cte.isPresent()) { - this.logicalQuery = (LogicalPlan) logicalQuery.withChildren(cte.get().withChildren( - this.logicalQuery.child(0))); + LogicalPlan logicalQuery = this.logicalQuery.get(); + this.logicalQuery = Optional.of( + (LogicalPlan) logicalQuery.withChildren( + cte.get().withChildren(logicalQuery.child(0)) + ) + ); } - + LogicalPlan logicalQuery = this.logicalQuery.get(); LogicalPlanAdapter logicalPlanAdapter = new LogicalPlanAdapter(logicalQuery, ctx.getStatementContext()); NereidsPlanner planner = new NereidsPlanner(ctx.getStatementContext()); planner.plan(logicalPlanAdapter, ctx.getSessionVariable().toThrift()); @@ -172,7 +189,7 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { isRunning.set(true); long taskId = 0; try { - if (isAutoDetectOverwrite()) { + if (isAutoDetectOverwrite(getLogicalQuery())) { // taskId here is a group id. it contains all replace tasks made and registered in rpc process. taskId = insertOverwriteManager.registerTaskGroup(); // When inserting, BE will call to replace partition by FrontendService. 
FE will register new temp @@ -219,7 +236,7 @@ public void run(ConnectContext ctx, StmtExecutor executor) throws Exception { } } catch (Exception e) { LOG.warn("insert into overwrite failed with task(or group) id " + taskId); - if (isAutoDetectOverwrite()) { + if (isAutoDetectOverwrite(getLogicalQuery())) { insertOverwriteManager.taskGroupFail(taskId); } else { insertOverwriteManager.taskFail(taskId); @@ -287,6 +304,7 @@ private void insertIntoPartitions(ConnectContext ctx, StmtExecutor executor, Lis // copy sink tot replace by tempPartitions UnboundLogicalSink copySink; InsertCommandContext insertCtx; + LogicalPlan logicalQuery = getLogicalQuery(); if (logicalQuery instanceof UnboundTableSink) { UnboundTableSink sink = (UnboundTableSink) logicalQuery; copySink = (UnboundLogicalSink) UnboundTableSinkCreator.createUnboundTableSink( @@ -342,6 +360,7 @@ private void insertIntoPartitions(ConnectContext ctx, StmtExecutor executor, Lis */ private void insertIntoAutoDetect(ConnectContext ctx, StmtExecutor executor, long groupId) throws Exception { InsertCommandContext insertCtx; + LogicalPlan logicalQuery = getLogicalQuery(); if (logicalQuery instanceof UnboundTableSink) { // 1. when overwrite auto-detect, allow auto partition or not is controlled by session variable. // 2. 
we save and pass overwrite auto detect by insertCtx @@ -362,7 +381,23 @@ private void insertIntoAutoDetect(ConnectContext ctx, StmtExecutor executor, lon @Override public Plan getExplainPlan(ConnectContext ctx) { - return InsertUtils.getPlanForExplain(ctx, this.logicalQuery); + Optional analyzeContext = Optional.of( + CascadesContext.initContext(ctx.getStatementContext(), originLogicalQuery, PhysicalProperties.ANY) + ); + return InsertUtils.getPlanForExplain(ctx, analyzeContext, getLogicalQuery()); + } + + @Override + public Optional getExplainPlanner(LogicalPlan logicalPlan, StatementContext ctx) { + LogicalPlan logicalQuery = getLogicalQuery(); + if (logicalQuery instanceof UnboundTableSink) { + boolean allowAutoPartition = ctx.getConnectContext().getSessionVariable().isEnableAutoCreateWhenOverwrite(); + OlapInsertCommandContext insertCtx = new OlapInsertCommandContext(allowAutoPartition, true); + InsertIntoTableCommand insertIntoTableCommand = new InsertIntoTableCommand( + logicalQuery, labelName, Optional.of(insertCtx), Optional.empty()); + return insertIntoTableCommand.getExplainPlanner(logicalPlan, ctx); + } + return Optional.empty(); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java index 459ffcd04f894a..497a287e802823 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/InsertUtils.java @@ -29,30 +29,40 @@ import org.apache.doris.common.FormatOptions; import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.datasource.jdbc.JdbcExternalTable; +import org.apache.doris.nereids.CascadesContext; +import org.apache.doris.nereids.analyzer.Scope; import org.apache.doris.nereids.analyzer.UnboundAlias; +import 
org.apache.doris.nereids.analyzer.UnboundFunction; import org.apache.doris.nereids.analyzer.UnboundHiveTableSink; import org.apache.doris.nereids.analyzer.UnboundIcebergTableSink; +import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.analyzer.UnboundJdbcTableSink; -import org.apache.doris.nereids.analyzer.UnboundOneRowRelation; +import org.apache.doris.nereids.analyzer.UnboundSlot; +import org.apache.doris.nereids.analyzer.UnboundStar; import org.apache.doris.nereids.analyzer.UnboundTableSink; +import org.apache.doris.nereids.analyzer.UnboundVariable; import org.apache.doris.nereids.exceptions.AnalysisException; -import org.apache.doris.nereids.parser.LogicalPlanBuilder; import org.apache.doris.nereids.parser.NereidsParser; +import org.apache.doris.nereids.properties.PhysicalProperties; +import org.apache.doris.nereids.rules.analysis.ExpressionAnalyzer; +import org.apache.doris.nereids.rules.expression.ExpressionRewriteContext; +import org.apache.doris.nereids.rules.expression.rules.ConvertAggStateCast; +import org.apache.doris.nereids.rules.expression.rules.FoldConstantRuleOnFE; import org.apache.doris.nereids.trees.expressions.Alias; import org.apache.doris.nereids.trees.expressions.Cast; import org.apache.doris.nereids.trees.expressions.DefaultValueSlot; import org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; -import org.apache.doris.nereids.trees.expressions.StatementScopeIdGenerator; import org.apache.doris.nereids.trees.expressions.literal.ArrayLiteral; import org.apache.doris.nereids.trees.expressions.literal.Literal; import org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.trees.plans.Plan; -import org.apache.doris.nereids.trees.plans.algebra.SetOperation.Qualifier; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import 
org.apache.doris.nereids.trees.plans.commands.info.DMLCommandType; import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.UnboundLogicalSink; +import org.apache.doris.nereids.types.AggStateType; import org.apache.doris.nereids.types.DataType; import org.apache.doris.nereids.util.RelationUtil; import org.apache.doris.nereids.util.TypeCoercionUtils; @@ -79,7 +89,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; +import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; @@ -260,7 +272,9 @@ private static void beginBatchInsertTransaction(ConnectContext ctx, /** * normalize plan to let it could be process correctly by nereids */ - public static Plan normalizePlan(Plan plan, TableIf table, Optional insertCtx) { + public static Plan normalizePlan(LogicalPlan plan, TableIf table, + Optional analyzeContext, + Optional insertCtx) { UnboundLogicalSink unboundLogicalSink = (UnboundLogicalSink) plan; if (table instanceof HMSExternalTable) { HMSExternalTable hiveTable = (HMSExternalTable) table; @@ -334,21 +348,39 @@ public static Plan normalizePlan(Plan plan, TableIf table, Optional oneRowRelationBuilder = ImmutableList.builder(); + + UnboundInlineTable unboundInlineTable = (UnboundInlineTable) query; + ImmutableList.Builder> optimizedRowConstructors + = ImmutableList.builderWithExpectedSize(unboundInlineTable.getConstantExprsList().size()); List columns = table.getBaseSchema(false); - for (List values : logicalInlineTable.getConstantExprsList()) { - ImmutableList.Builder constantExprs = ImmutableList.builder(); + ConnectContext context = ConnectContext.get(); + ExpressionRewriteContext rewriteContext = null; + if (context != null && context.getStatementContext() != null) 
{ + rewriteContext = new ExpressionRewriteContext( + CascadesContext.initContext( + context.getStatementContext(), unboundInlineTable, PhysicalProperties.ANY + ) + ); + } + + Optional analyzer = analyzeContext.map( + cascadesContext -> buildExprAnalyzer(plan, cascadesContext) + ); + + for (List values : unboundInlineTable.getConstantExprsList()) { + ImmutableList.Builder optimizedRowConstructor = ImmutableList.builder(); if (values.isEmpty()) { if (CollectionUtils.isNotEmpty(unboundLogicalSink.getColNames())) { throw new AnalysisException("value list should not be empty if columns are specified"); } - for (Column column : columns) { - constantExprs.add(generateDefaultExpression(column)); + for (int i = 0; i < columns.size(); i++) { + Column column = columns.get(i); + NamedExpression defaultExpression = generateDefaultExpression(column); + addColumnValue(analyzer, optimizedRowConstructor, defaultExpression); } } else { if (CollectionUtils.isNotEmpty(unboundLogicalSink.getColNames())) { @@ -374,10 +406,15 @@ public static Plan normalizePlan(Plan plan, TableIf table, Optional oneRowRelations = oneRowRelationBuilder.build(); - if (oneRowRelations.size() == 1) { - return plan.withChildren(oneRowRelations.get(0)); - } else { - return plan.withChildren( - LogicalPlanBuilder.reduceToLogicalPlanTree(0, oneRowRelations.size() - 1, - oneRowRelations, Qualifier.ALL)); + return plan.withChildren(new LogicalInlineTable(optimizedRowConstructors.build())); + } + + /** buildAnalyzer */ + public static ExpressionAnalyzer buildExprAnalyzer(Plan plan, CascadesContext analyzeContext) { + return new ExpressionAnalyzer(plan, new Scope(ImmutableList.of()), + analyzeContext, false, false) { + @Override + public Expression visitCast(Cast cast, ExpressionRewriteContext context) { + Expression expr = super.visitCast(cast, context); + if (expr instanceof Cast) { + if (expr.child(0).getDataType() instanceof AggStateType) { + expr = ConvertAggStateCast.convert((Cast) expr); + } else { + expr = 
FoldConstantRuleOnFE.evaluate(expr, context); + } + } + return expr; + } + + @Override + public Expression visitUnboundFunction(UnboundFunction unboundFunction, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundFunction(unboundFunction, context); + if (expr instanceof UnboundFunction) { + throw new IllegalStateException("Can not analyze function " + unboundFunction.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundSlot(UnboundSlot unboundSlot, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundSlot(unboundSlot, context); + if (expr instanceof UnboundFunction) { + throw new AnalysisException("Can not analyze slot " + unboundSlot.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundVariable(UnboundVariable unboundVariable, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundVariable(unboundVariable, context); + if (expr instanceof UnboundVariable) { + throw new AnalysisException("Can not analyze variable " + unboundVariable.getName()); + } + return expr; + } + + @Override + public Expression visitUnboundAlias(UnboundAlias unboundAlias, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundAlias(unboundAlias, context); + if (expr instanceof UnboundVariable) { + throw new AnalysisException("Can not analyze alias"); + } + return expr; + } + + @Override + public Expression visitUnboundStar(UnboundStar unboundStar, ExpressionRewriteContext context) { + Expression expr = super.visitUnboundStar(unboundStar, context); + if (expr instanceof UnboundStar) { + List qualifier = unboundStar.getQualifier(); + List qualified = new ArrayList<>(qualifier); + qualified.add("*"); + throw new AnalysisException("Can not analyze " + StringUtils.join(qualified, ".")); + } + return expr; + } + }; + } + + private static void addColumnValue( + Optional analyzer, + ImmutableList.Builder optimizedRowConstructor, + NamedExpression value) { + if 
(analyzer.isPresent() && !(value instanceof Alias && value.child(0) instanceof Literal)) { + ExpressionAnalyzer expressionAnalyzer = analyzer.get(); + value = (NamedExpression) expressionAnalyzer.analyze( + value, new ExpressionRewriteContext(expressionAnalyzer.getCascadesContext()) + ); } + optimizedRowConstructor.add(value); } private static Expression castValue(Expression value, DataType targetType) { - if (value instanceof UnboundAlias) { - return value.withChildren(TypeCoercionUtils.castUnbound(((UnboundAlias) value).child(), targetType)); + if (value instanceof Alias) { + Expression oldChild = value.child(0); + Expression newChild = TypeCoercionUtils.castUnbound(oldChild, targetType); + return oldChild == newChild ? value : value.withChildren(newChild); + } else if (value instanceof UnboundAlias) { + UnboundAlias unboundAlias = (UnboundAlias) value; + return new Alias(TypeCoercionUtils.castUnbound(unboundAlias.child(), targetType)); } else { return TypeCoercionUtils.castUnbound(value, targetType); } @@ -484,8 +603,18 @@ private static NamedExpression generateDefaultExpression(Column column) { /** * get plan for explain. 
*/ - public static Plan getPlanForExplain(ConnectContext ctx, LogicalPlan logicalQuery) { - return InsertUtils.normalizePlan(logicalQuery, InsertUtils.getTargetTable(logicalQuery, ctx), Optional.empty()); + public static Plan getPlanForExplain( + ConnectContext ctx, Optional analyzeContext, LogicalPlan logicalQuery) { + return InsertUtils.normalizePlan( + logicalQuery, InsertUtils.getTargetTable(logicalQuery, ctx), analyzeContext, Optional.empty()); + } + + /** supportFastInsertIntoValues */ + public static boolean supportFastInsertIntoValues( + LogicalPlan logicalPlan, TableIf targetTableIf, ConnectContext ctx) { + return logicalPlan instanceof UnboundTableSink && logicalPlan.child(0) instanceof InlineTable + && targetTableIf instanceof OlapTable + && ctx != null && ctx.getSessionVariable().isEnableFastAnalyzeInsertIntoValues(); } // check for insert into t1(a,b,gen_col) select 1,2,3; @@ -508,7 +637,7 @@ private static void checkGeneratedColumnForInsertIntoSelect(TableIf table, return; } Plan query = unboundLogicalSink.child(); - if (table instanceof OlapTable && !(query instanceof LogicalInlineTable)) { + if (table instanceof OlapTable && !(query instanceof InlineTable)) { OlapTable olapTable = (OlapTable) table; Set insertNames = Sets.newHashSet(); if (unboundLogicalSink.getColNames() != null) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java index e7b1f4d581892c..0f3e320edcd4bf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/insert/OlapGroupCommitInsertExecutor.java @@ -30,6 +30,7 @@ import org.apache.doris.nereids.NereidsPlanner; import org.apache.doris.nereids.analyzer.UnboundTableSink; import 
org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.algebra.OneRowRelation; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; @@ -93,8 +94,11 @@ protected static void analyzeGroupCommit(ConnectContext ctx, TableIf table, Logi conditions.add(Pair.of(() -> !(insertCtx.isPresent() && insertCtx.get() instanceof OlapInsertCommandContext && ((OlapInsertCommandContext) insertCtx.get()).isOverwrite()), () -> "is overwrite command")); conditions.add(Pair.of( - () -> tableSink.child() instanceof OneRowRelation || tableSink.child() instanceof LogicalUnion, - () -> "not one row relation or union, class: " + tableSink.child().getClass().getName())); + () -> tableSink.child() instanceof OneRowRelation + || tableSink.child() instanceof LogicalUnion + || tableSink.child() instanceof InlineTable, + () -> "not one row relation or union or inline table, class: " + + tableSink.child().getClass().getName())); ctx.setGroupCommit(conditions.stream().allMatch(p -> p.first.getAsBoolean())); if (!ctx.isGroupCommit() && LOG.isDebugEnabled()) { for (Pair> pair : conditions) { diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java index b2a2a1d83ca3e7..748bc8fdfa2223 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalInlineTable.java @@ -17,15 +17,19 @@ package org.apache.doris.nereids.trees.plans.logical; +import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.memo.GroupExpression; import org.apache.doris.nereids.properties.LogicalProperties; import 
org.apache.doris.nereids.trees.expressions.Expression; import org.apache.doris.nereids.trees.expressions.NamedExpression; import org.apache.doris.nereids.trees.expressions.Slot; +import org.apache.doris.nereids.trees.expressions.SlotReference; import org.apache.doris.nereids.trees.plans.BlockFuncDepsPropagation; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.util.Utils; import com.google.common.collect.ImmutableList; @@ -36,7 +40,7 @@ /** * represent value list such as values(1), (2), (3) will generate LogicalInlineTable((1), (2), (3)). */ -public class LogicalInlineTable extends LogicalLeaf implements BlockFuncDepsPropagation { +public class LogicalInlineTable extends LogicalLeaf implements InlineTable, BlockFuncDepsPropagation { private final List> constantExprsList; @@ -44,11 +48,16 @@ public LogicalInlineTable(List> constantExprsList) { this(constantExprsList, Optional.empty(), Optional.empty()); } + /** LogicalInlineTable */ public LogicalInlineTable(List> constantExprsList, Optional groupExpression, Optional logicalProperties) { super(PlanType.LOGICAL_INLINE_TABLE, groupExpression, logicalProperties); - this.constantExprsList = ImmutableList.copyOf( + + if (constantExprsList.isEmpty()) { + throw new AnalysisException("constantExprsList should now be empty"); + } + this.constantExprsList = Utils.fastToImmutableList( Objects.requireNonNull(constantExprsList, "constantExprsList should not be null")); } @@ -63,23 +72,49 @@ public R accept(PlanVisitor visitor, C context) { @Override public List getExpressions() { - return constantExprsList.stream().flatMap(List::stream).collect(ImmutableList.toImmutableList()); + ImmutableList.Builder expressions = ImmutableList.builderWithExpectedSize( + constantExprsList.size() * 
constantExprsList.get(0).size()); + + for (List namedExpressions : constantExprsList) { + expressions.addAll(namedExpressions); + } + + return expressions.build(); } @Override public Plan withGroupExpression(Optional groupExpression) { - return null; + return new LogicalInlineTable( + constantExprsList, groupExpression, Optional.of(getLogicalProperties()) + ); } @Override public Plan withGroupExprLogicalPropChildren(Optional groupExpression, Optional logicalProperties, List children) { - return null; + if (!children.isEmpty()) { + throw new AnalysisException("children should not be empty"); + } + return new LogicalInlineTable(constantExprsList, groupExpression, logicalProperties); } @Override public List computeOutput() { - return ImmutableList.of(); + int columnNum = constantExprsList.get(0).size(); + List firstRow = constantExprsList.get(0); + ImmutableList.Builder output = ImmutableList.builderWithExpectedSize(constantExprsList.size()); + for (int i = 0; i < columnNum; i++) { + NamedExpression firstRowColumn = firstRow.get(i); + boolean nullable = false; + for (List row : constantExprsList) { + if (row.get(i).nullable()) { + nullable = true; + break; + } + } + output.add(new SlotReference(firstRowColumn.getName(), firstRowColumn.getDataType(), nullable)); + } + return output.build(); } @Override @@ -98,4 +133,11 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(constantExprsList); } + + @Override + public String toString() { + return Utils.toSqlString("LogicalInlineTable[" + id.asInt() + "]", + "rowNum", constantExprsList.size(), + "constantExprsList", constantExprsList); + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java index 7023815c7c5b99..9fa14458ed38b9 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalOneRowRelation.java @@ -54,7 +54,7 @@ public LogicalOneRowRelation(RelationId relationId, List projec private LogicalOneRowRelation(RelationId relationId, List projects, Optional groupExpression, Optional logicalProperties) { super(relationId, PlanType.LOGICAL_ONE_ROW_RELATION, groupExpression, logicalProperties); - this.projects = ImmutableList.copyOf(Objects.requireNonNull(projects, "projects can not be null")); + this.projects = Utils.fastToImmutableList(Objects.requireNonNull(projects, "projects can not be null")); } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java index 2e4ddb55ff2f02..e13ec2864b3cd8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalSetOperation.java @@ -216,7 +216,8 @@ public int getArity() { return children.size(); } - private DataType getAssignmentCompatibleType(DataType left, DataType right) { + /** getAssignmentCompatibleType */ + public static DataType getAssignmentCompatibleType(DataType left, DataType right) { if (left.isNullType()) { return right; } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java index 459044100b632d..d9fae844c48912 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/logical/LogicalUnion.java @@ -17,6 +17,7 @@ package org.apache.doris.nereids.trees.plans.logical; +import org.apache.doris.common.Pair; import org.apache.doris.nereids.memo.GroupExpression; import 
org.apache.doris.nereids.properties.DataTrait; import org.apache.doris.nereids.properties.LogicalProperties; @@ -28,11 +29,14 @@ import org.apache.doris.nereids.trees.plans.PlanType; import org.apache.doris.nereids.trees.plans.algebra.Union; import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.nereids.types.DataType; +import org.apache.doris.nereids.util.TypeCoercionUtils; import org.apache.doris.nereids.util.Utils; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.BitSet; @@ -205,6 +209,14 @@ public void computeUniform(DataTrait.Builder builder) { // don't propagate uniform slots } + @Override + public boolean hasUnboundExpression() { + if (!constantExprsList.isEmpty() && children.isEmpty()) { + return false; + } + return super.hasUnboundExpression(); + } + private List mapSlotToIndex(Plan plan, List> equalSlotsList) { Map slotToIndex = new HashMap<>(); for (int i = 0; i < plan.getOutput().size(); i++) { @@ -280,4 +292,73 @@ public void computeEqualSet(DataTrait.Builder builder) { public void computeFd(DataTrait.Builder builder) { // don't generate } + + /** castCommonDataTypeAndNullableByConstants */ + public static Pair>, List> castCommonDataTypeAndNullableByConstants( + List> constantExprsList) { + int columnCount = constantExprsList.isEmpty() ? 
0 : constantExprsList.get(0).size(); + Pair, List> commonInfo + = computeCommonDataTypeAndNullable(constantExprsList, columnCount); + List> castedRows = castToCommonType(constantExprsList, commonInfo.key(), columnCount); + List nullables = commonInfo.second; + return Pair.of(castedRows, nullables); + } + + private static Pair, List> computeCommonDataTypeAndNullable( + List> constantExprsList, int columnCount) { + List nullables = Lists.newArrayListWithCapacity(columnCount); + List commonDataTypes = Lists.newArrayListWithCapacity(columnCount); + List firstRow = constantExprsList.get(0); + for (int columnId = 0; columnId < columnCount; columnId++) { + Expression constant = firstRow.get(columnId).child(0); + Pair commonDataTypeAndNullable + = computeCommonDataTypeAndNullable(constant, columnId, constantExprsList); + commonDataTypes.add(commonDataTypeAndNullable.first); + nullables.add(commonDataTypeAndNullable.second); + } + return Pair.of(commonDataTypes, nullables); + } + + private static Pair computeCommonDataTypeAndNullable( + Expression firstRowExpr, int columnId, List> constantExprsList) { + DataType commonDataType = firstRowExpr.getDataType(); + boolean nullable = firstRowExpr.nullable(); + for (int rowId = 1; rowId < constantExprsList.size(); rowId++) { + NamedExpression namedExpression = constantExprsList.get(rowId).get(columnId); + Expression otherConstant = namedExpression.child(0); + nullable |= otherConstant.nullable(); + DataType otherDataType = otherConstant.getDataType(); + commonDataType = getAssignmentCompatibleType(commonDataType, otherDataType); + } + return Pair.of(commonDataType, nullable); + } + + private static List> castToCommonType( + List> rows, List commonDataTypes, int columnCount) { + ImmutableList.Builder> castedConstants + = ImmutableList.builderWithExpectedSize(rows.size()); + for (List row : rows) { + castedConstants.add(castToCommonType(row, commonDataTypes)); + } + return castedConstants.build(); + } + + private static List 
castToCommonType(List row, List commonTypes) { + ImmutableList.Builder castedRow = ImmutableList.builderWithExpectedSize(row.size()); + boolean changed = false; + for (int columnId = 0; columnId < row.size(); columnId++) { + NamedExpression constantAlias = row.get(columnId); + Expression constant = constantAlias.child(0); + DataType commonType = commonTypes.get(columnId); + if (commonType.equals(constant.getDataType())) { + castedRow.add(constantAlias); + } else { + changed = true; + Expression expression + = TypeCoercionUtils.castIfNotSameTypeStrict(constant, commonType); + castedRow.add((NamedExpression) constantAlias.withChildren(expression)); + } + } + return changed ? castedRow.build() : row; + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java index efe0a03e3708b4..90e92ca1ae2597 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/physical/PhysicalOlapTableSink.java @@ -178,7 +178,7 @@ public int hashCode() { @Override public String toString() { - return Utils.toSqlString("LogicalOlapTableSink[" + id.asInt() + "]", + return Utils.toSqlString("PhysicalOlapTableSink[" + id.asInt() + "]", "outputExprs", outputExprs, "database", database.getFullName(), "targetTable", targetTable.getName(), diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java index 396c6e4f26569f..f7642ce572c1a3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/PlanVisitor.java @@ -17,6 +17,7 @@ package org.apache.doris.nereids.trees.plans.visitor; 
+import org.apache.doris.nereids.analyzer.UnboundInlineTable; import org.apache.doris.nereids.trees.plans.GroupPlan; import org.apache.doris.nereids.trees.plans.Plan; import org.apache.doris.nereids.trees.plans.commands.Command; @@ -99,6 +100,7 @@ public R visitCommand(Command command, C context) { return visit(command, context); } + // ******************************* // relations // ******************************* @@ -130,6 +132,10 @@ public R visitPhysicalSink(PhysicalSink physicalSink, C context) // ******************************* // Logical plans // ******************************* + public R visitUnboundInlineTable(UnboundInlineTable unboundInlineTable, C context) { + return visit(unboundInlineTable, context); + } + public R visitLogicalSqlCache(LogicalSqlCache sqlCache, C context) { return visit(sqlCache, context); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java index 1da4353d20da33..1fb108b79fd6a6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/util/TypeCoercionUtils.java @@ -601,8 +601,7 @@ public static Optional characterLiteralTypeCoercion(String value, Da } else if (dataType.isDateTimeType() && DateTimeChecker.isValidDateTime(value)) { ret = DateTimeLiteral.parseDateTimeLiteral(value, false).orElse(null); } else if (dataType.isDateV2Type() && DateTimeChecker.isValidDateTime(value)) { - Result parseResult - = DateV2Literal.parseDateLiteral(value); + Result parseResult = DateV2Literal.parseDateLiteral(value); if (parseResult.isOk()) { ret = parseResult.get(); } else { diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java index cd1be6c5cb6df7..446960f9d56415 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/qe/AuditLogHelper.java @@ -37,9 +37,9 @@ import org.apache.doris.nereids.analyzer.UnboundTableSink; import org.apache.doris.nereids.glue.LogicalPlanAdapter; import org.apache.doris.nereids.trees.plans.Plan; +import org.apache.doris.nereids.trees.plans.algebra.InlineTable; import org.apache.doris.nereids.trees.plans.commands.NeedAuditEncryption; import org.apache.doris.nereids.trees.plans.commands.insert.InsertIntoTableCommand; -import org.apache.doris.nereids.trees.plans.logical.LogicalInlineTable; import org.apache.doris.nereids.trees.plans.logical.LogicalPlan; import org.apache.doris.nereids.trees.plans.logical.LogicalUnion; import org.apache.doris.plugin.AuditEvent.AuditEventBuilder; @@ -162,8 +162,8 @@ private static int countValues(List children) { for (Plan child : children) { if (child instanceof UnboundOneRowRelation) { cnt++; - } else if (child instanceof LogicalInlineTable) { - cnt += ((LogicalInlineTable) child).getConstantExprsList().size(); + } else if (child instanceof InlineTable) { + cnt += ((InlineTable) child).getConstantExprsList().size(); } else if (child instanceof LogicalUnion) { cnt += countValues(child.children()); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java index cf26cce7383e1c..d62c00534b6d94 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/qe/SessionVariable.java @@ -366,6 +366,8 @@ public class SessionVariable implements Serializable, Writable { public static final String ENABLE_SINGLE_REPLICA_INSERT = "enable_single_replica_insert"; + public static final String ENABLE_FAST_ANALYZE_INSERT_INTO_VALUES = "enable_fast_analyze_into_values"; + public static final String ENABLE_FUNCTION_PUSHDOWN = "enable_function_pushdown"; public static final String ENABLE_EXT_FUNC_PRED_PUSHDOWN = "enable_ext_func_pred_pushdown"; @@ 
-1497,6 +1499,15 @@ public void setEnableLeftZigZag(boolean enableLeftZigZag) { needForward = true, varType = VariableAnnotation.EXPERIMENTAL) public boolean enableSingleReplicaInsert = false; + @VariableMgr.VarAttr( + name = ENABLE_FAST_ANALYZE_INSERT_INTO_VALUES, fuzzy = true, + description = { + "跳过大部分的优化规则,快速分析insert into values语句", + "Skip most optimization rules and quickly analyze insert into values statements" + } + ) + private boolean enableFastAnalyzeInsertIntoValues = true; + @VariableMgr.VarAttr(name = ENABLE_FUNCTION_PUSHDOWN, fuzzy = true) public boolean enableFunctionPushdown = false; @@ -3654,8 +3665,6 @@ public boolean isEnableExprTrace() { return enableExprTrace; } - - public boolean isEnableSingleReplicaInsert() { return enableSingleReplicaInsert; } @@ -3664,6 +3673,14 @@ public void setEnableSingleReplicaInsert(boolean enableSingleReplicaInsert) { this.enableSingleReplicaInsert = enableSingleReplicaInsert; } + public boolean isEnableFastAnalyzeInsertIntoValues() { + return enableFastAnalyzeInsertIntoValues; + } + + public void setEnableFastAnalyzeInsertIntoValues(boolean enableFastAnalyzeInsertIntoValues) { + this.enableFastAnalyzeInsertIntoValues = enableFastAnalyzeInsertIntoValues; + } + public boolean isEnableMemtableOnSinkNode() { return enableMemtableOnSinkNode; } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java index 29889efdd6ce13..32b3706b290fbd 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyComparisonPredicateSqlTest.java @@ -17,14 +17,12 @@ package org.apache.doris.nereids.rules.expression.rules; -import org.apache.doris.nereids.exceptions.AnalysisException; import 
org.apache.doris.nereids.trees.expressions.literal.NullLiteral; import org.apache.doris.nereids.types.DateTimeV2Type; import org.apache.doris.nereids.util.MemoPatternMatchSupported; import org.apache.doris.nereids.util.PlanChecker; import org.apache.doris.utframe.TestWithFeService; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; class SimplifyComparisonPredicateSqlTest extends TestWithFeService implements MemoPatternMatchSupported { @@ -153,17 +151,30 @@ void dateLikeOverflow() { ) ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 00:00:00'") .rewrite() - ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + + PlanChecker.from(connectContext) + .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 00:00:00'") + .rewrite() + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '2021-01-32 23:00:00'") .rewrite() - ); - Assertions.assertThrows(AnalysisException.class, () -> PlanChecker.from(connectContext) + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); + PlanChecker.from(connectContext) .analyze("select CAST('2021-01-32 00:00:00' AS DATETIME(6)) = '1000'") .rewrite() - ); + .matches(logicalOneRowRelation().when(oneRowRelation -> + oneRowRelation.getExpressions().get(0).child(0) instanceof NullLiteral) + ); } } diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java 
b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java index a002fe1a9c04a9..8279cd5cc4ffeb 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/SelectReplaceTest.java @@ -136,7 +136,7 @@ public void testParse() { )); // need select * - String sql3 = "seelct k1, k2, v1, v2 replace(k1 / 2 as k1) from t1"; + String sql3 = "select k1, k2, v1, v2 replace(k1 / 2 as k1) from t1"; Assertions.assertThrows(ParseException.class, () -> PlanChecker.from(MemoTestUtils.createConnectContext()) .checkParse(sql3, (checker) -> checker.matches( logicalProject( @@ -152,7 +152,7 @@ public void testParse() { .checkParse(sql4, (checker) -> checker.matches( logicalProject( logicalCheckPolicy( - unboundRelation() + logicalOneRowRelation() ) ) ))); @@ -162,9 +162,7 @@ public void testParse() { Assertions.assertThrows(ParseException.class, () -> PlanChecker.from(MemoTestUtils.createConnectContext()) .checkParse(sql5, (checker) -> checker.matches( logicalProject( - logicalCheckPolicy( - unboundRelation() - ) + logicalOneRowRelation() ) ))); diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java index 8db1c9446d0c6d..786355c83b76c6 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/trees/expressions/literal/DateLiteralTest.java @@ -23,6 +23,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; +import java.time.DateTimeException; import java.util.function.Consumer; class DateLiteralTest { @@ -67,7 +68,7 @@ void testDate() { new DateLiteral("2022-1-1"); new DateLiteral("20220101"); - Assertions.assertThrows(AnalysisException.class, () -> new 
DateLiteral("-01-01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateLiteral("-01-01")); } @Test @@ -128,8 +129,8 @@ void testIrregularDate() { @Test void testWrongPunctuationDate() { - Assertions.assertThrows(AnalysisException.class, () -> new DateTimeV2Literal("2020€02€01")); - Assertions.assertThrows(AnalysisException.class, () -> new DateTimeV2Literal("2020【02】01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateTimeV2Literal("2020€02€01")); + Assertions.assertThrows(DateTimeException.class, () -> new DateTimeV2Literal("2020【02】01")); } @Test diff --git a/regression-test/suites/compression_p0/load.groovy b/regression-test/suites/compression_p0/load.groovy index 70eeafea12ae48..722732f65c3b06 100644 --- a/regression-test/suites/compression_p0/load.groovy +++ b/regression-test/suites/compression_p0/load.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_compression", "p0") { +suite("load") { // test snappy compression algorithm def tableName = "test_snappy" diff --git a/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy b/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy index 633ad98d86f556..b448ad406bbde2 100644 --- a/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy +++ b/regression-test/suites/datatype_p0/nested_types/query/test_nestedtypes_insert_into_select.groovy @@ -32,7 +32,7 @@ suite("test_nestedtypes_insert_into_select", "p0") { test { sql "insert into ast values ('text' , [named_struct('a',1,'b','home'),named_struct('a',2,'b','work')]);" - exception "mismatched input 'named_struct' expecting" + exception "no viable alternative at input '[named_struct'" } @@ -50,6 +50,6 @@ suite("test_nestedtypes_insert_into_select", "p0") { test { sql "insert into ast values ('text' , 
[named_struct('a',1,'b','home'),named_struct('a',2,'b','work')]);" - exception "mismatched input 'named_struct' expecting" + exception "no viable alternative at input '[named_struct'" } } diff --git a/regression-test/suites/index_p0/load.groovy b/regression-test/suites/index_p0/load.groovy index 174339f148300a..5416a5096329cb 100644 --- a/regression-test/suites/index_p0/load.groovy +++ b/regression-test/suites/index_p0/load.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_bitmap_index_load") { +suite("load") { def tbName = "test_decimal_bitmap_index_multi_page" sql """ diff --git a/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy b/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy index 054add11d9f3a3..166d329c455511 100644 --- a/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy +++ b/regression-test/suites/insert_p0/insert_group_commit_with_exception.groovy @@ -241,7 +241,7 @@ suite("insert_group_commit_with_exception") { assertTrue(false) } catch (Exception e) { logger.info("exception : " + e) - assertTrue(e.getMessage().contains("insert into cols should be corresponding to the query output")) + assertTrue(e.getMessage().contains("Column count doesn't match value count")) } } getRowCount(14) diff --git a/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy b/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy index a615c7316bdb56..44fc259a71a1cc 100644 --- a/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/dimension/dimension_2_inner_join.groovy @@ -19,7 +19,7 @@ This suite is a two dimensional test case file. It mainly tests the inner join and filter positions. 
*/ -suite("partition_mv_rewrite_dimension_2_2") { +suite("dimension_2_inner_join") { String db = context.config.getDbNameByFile(context.file) sql "use ${db}" From 2d65c59968c180b9d2414f899306ce76a32e5a19 Mon Sep 17 00:00:00 2001 From: airborne12 Date: Mon, 23 Dec 2024 18:15:01 +0800 Subject: [PATCH 61/82] [chore](inverted index) do not print downgrade reason for inverted index try query (#45789) Change level of logs below to debug mode ``` will downgrade without index to evaluate predicate, because of res: [E-6004]hit count: 413, bkd inverted reached limit 50%, segment num rows:33 ``` --- be/src/olap/rowset/segment_v2/segment_iterator.cpp | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/be/src/olap/rowset/segment_v2/segment_iterator.cpp b/be/src/olap/rowset/segment_v2/segment_iterator.cpp index 366c6d3ce21a76..ec0f9104e050e7 100644 --- a/be/src/olap/rowset/segment_v2/segment_iterator.cpp +++ b/be/src/olap/rowset/segment_v2/segment_iterator.cpp @@ -839,7 +839,13 @@ bool SegmentIterator::_downgrade_without_index(Status res, bool need_remaining) // such as when index segment files are not generated // above case can downgrade without index query _opts.stats->inverted_index_downgrade_count++; - LOG(INFO) << "will downgrade without index to evaluate predicate, because of res: " << res; + if (!res.is()) { + LOG(INFO) << "will downgrade without index to evaluate predicate, because of res: " + << res; + } else { + VLOG_DEBUG << "will downgrade without index to evaluate predicate, because of res: " + << res; + } return true; } return false; From e5991ccf3a69fbcfdd80adf944c09c6f4d0b0fff Mon Sep 17 00:00:00 2001 From: morrySnow Date: Mon, 23 Dec 2024 18:56:11 +0800 Subject: [PATCH 62/82] [style](fe) add package directory structure matching check (#45794) ### What problem does this PR solve? 
Problem Summary: check fe-core package directory structure to ensure it is same with package name in files and fix check failed files --- fe/check/checkstyle/checkstyle.xml | 4 ++++ .../LogicalResultSinkToShortCircuitPointQuery.java | 0 .../{ => trees/plans/commands}/RecoverPartitionCommand.java | 0 .../java/org/apache/doris/httpv2/{ => rest}/CopyIntoTest.java | 0 4 files changed, 4 insertions(+) rename fe/fe-core/src/main/java/org/apache/doris/nereids/rules/{analysis => rewrite}/LogicalResultSinkToShortCircuitPointQuery.java (100%) rename fe/fe-core/src/main/java/org/apache/doris/nereids/{ => trees/plans/commands}/RecoverPartitionCommand.java (100%) rename fe/fe-core/src/test/java/org/apache/doris/httpv2/{ => rest}/CopyIntoTest.java (100%) diff --git a/fe/check/checkstyle/checkstyle.xml b/fe/check/checkstyle/checkstyle.xml index 39a1e5c569fd6a..663f17df0f583d 100644 --- a/fe/check/checkstyle/checkstyle.xml +++ b/fe/check/checkstyle/checkstyle.xml @@ -431,6 +431,10 @@ under the License. 
value="WhitespaceAround: ''{0}'' is not preceded with whitespace."/> + + + + diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/LogicalResultSinkToShortCircuitPointQuery.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/LogicalResultSinkToShortCircuitPointQuery.java similarity index 100% rename from fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/LogicalResultSinkToShortCircuitPointQuery.java rename to fe/fe-core/src/main/java/org/apache/doris/nereids/rules/rewrite/LogicalResultSinkToShortCircuitPointQuery.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/RecoverPartitionCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/RecoverPartitionCommand.java similarity index 100% rename from fe/fe-core/src/main/java/org/apache/doris/nereids/RecoverPartitionCommand.java rename to fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/RecoverPartitionCommand.java diff --git a/fe/fe-core/src/test/java/org/apache/doris/httpv2/CopyIntoTest.java b/fe/fe-core/src/test/java/org/apache/doris/httpv2/rest/CopyIntoTest.java similarity index 100% rename from fe/fe-core/src/test/java/org/apache/doris/httpv2/CopyIntoTest.java rename to fe/fe-core/src/test/java/org/apache/doris/httpv2/rest/CopyIntoTest.java From c961a8c8b37d2a1200d026cce7929dd84d9a3801 Mon Sep 17 00:00:00 2001 From: zhangdong Date: Mon, 23 Dec 2024 19:08:58 +0800 Subject: [PATCH 63/82] [case](auth)Add case for auth (#45478) ### What problem does this PR solve? 
add case - Restrictions on special users, roles, and resources - Permission control for select count - Permission control for tvf --- .../suites/account_p0/test_system_db.groovy | 46 ++++++++ .../suites/account_p0/test_system_role.groovy | 61 +++++++++++ .../suites/account_p0/test_system_user.groovy | 28 ++++- .../suites/auth_p0/test_catalogs_auth.groovy | 68 ++++++++++++ .../suites/auth_p0/test_mtmv_auth.groovy | 100 ++++++++++++++++++ .../test_partition_values_tvf_auth.groovy | 69 ++++++++++++ .../auth_p0/test_partitions_auth.groovy | 84 +++++++++++++++ .../suites/auth_p0/test_query_tvf_auth.groovy | 74 +++++++++++++ .../auth_p0/test_select_count_auth.groovy | 93 ++++++++++++++++ .../tvf/test_iceberg_meta.groovy | 34 +++++- 10 files changed, 655 insertions(+), 2 deletions(-) create mode 100644 regression-test/suites/account_p0/test_system_db.groovy create mode 100644 regression-test/suites/account_p0/test_system_role.groovy create mode 100644 regression-test/suites/auth_p0/test_catalogs_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_mtmv_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_partitions_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_query_tvf_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_select_count_auth.groovy diff --git a/regression-test/suites/account_p0/test_system_db.groovy b/regression-test/suites/account_p0/test_system_db.groovy new file mode 100644 index 00000000000000..11b9d6d492bf42 --- /dev/null +++ b/regression-test/suites/account_p0/test_system_db.groovy @@ -0,0 +1,46 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_system_db","p0,auth") { + String suiteName = "test_system_db" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """ + grant select_priv on __internal_schema.* to `${user}`; + """ + sql """ + grant select_priv on information_schema.* to `${user}`; + """ + sql """ + grant select_priv on mysql.* to `${user}`; + """ + sql """ + revoke select_priv on __internal_schema.* from `${user}`; + """ + sql """ + revoke select_priv on information_schema.* from `${user}`; + """ + sql """ + revoke select_priv on mysql.* from `${user}`; + """ + try_sql("DROP USER ${user}") +} diff --git a/regression-test/suites/account_p0/test_system_role.groovy b/regression-test/suites/account_p0/test_system_role.groovy new file mode 100644 index 00000000000000..64c0f122fa21dd --- /dev/null +++ b/regression-test/suites/account_p0/test_system_role.groovy @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_system_role","p0,auth") { + test { + sql """ + drop role operator; + """ + exception "Can not drop role" + } + + test { + sql """ + drop role `admin`; + """ + exception "Can not drop role" + } + + test { + sql """ + grant select_priv on *.*.* to role "operator"; + """ + exception "Can not grant" + } + test { + sql """ + grant select_priv on *.*.* to role "admin"; + """ + exception "Can not grant" + } + test { + sql """ + revoke Node_priv on *.*.* from role 'operator'; + """ + exception "Can not revoke" + } + + test { + sql """ + revoke Admin_priv on *.*.* from role 'admin'; + """ + exception "Can not revoke" + } + +} diff --git a/regression-test/suites/account_p0/test_system_user.groovy b/regression-test/suites/account_p0/test_system_user.groovy index 1805f1669ea570..5993e1d238b444 100644 --- a/regression-test/suites/account_p0/test_system_user.groovy +++ b/regression-test/suites/account_p0/test_system_user.groovy @@ -17,7 +17,7 @@ import org.junit.Assert; -suite("test_system_user") { +suite("test_system_user","p0,auth") { test { sql """ create user `root`; @@ -36,4 +36,30 @@ suite("test_system_user") { """ exception "system" } + test { + sql """ + revoke "operator" from root; + """ + exception "Can not revoke role" + } + test { + sql """ + revoke 'admin' from `admin`; + """ + exception "Unsupported operation" + } + + sql """ + grant select_priv on *.*.* to `root`; + """ + sql """ + revoke select_priv on *.*.* from `root`; + """ + sql """ + grant select_priv on *.*.* to `admin`; + """ + sql """ + revoke 
select_priv on *.*.* from `admin`; + """ + } diff --git a/regression-test/suites/auth_p0/test_catalogs_auth.groovy b/regression-test/suites/auth_p0/test_catalogs_auth.groovy new file mode 100644 index 00000000000000..96ebcef7cf81cb --- /dev/null +++ b/regression-test/suites/auth_p0/test_catalogs_auth.groovy @@ -0,0 +1,68 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_catalogs_auth","p0,auth") { + String suiteName = "test_catalogs_auth" + String catalogName = "${suiteName}_catalog" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """drop catalog if exists ${catalogName}""" + sql """CREATE CATALOG ${catalogName} PROPERTIES ( + "type"="es", + "hosts"="http://8.8.8.8:9200" + );""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def showRes = sql """show catalogs;""" + logger.info("showRes: " + showRes.toString()) + assertFalse(showRes.toString().contains("${catalogName}")) + + def tvfRes = sql """select * from catalogs();""" + logger.info("tvfRes: " + tvfRes.toString()) + assertFalse(tvfRes.toString().contains("${catalogName}")) + } + + sql """grant select_priv on ${catalogName}.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def showRes = sql """show catalogs;""" + logger.info("showRes: " + showRes.toString()) + assertTrue(showRes.toString().contains("${catalogName}")) + + def tvfRes = sql """select * from catalogs();""" + logger.info("tvfRes: " + tvfRes.toString()) + assertTrue(tvfRes.toString().contains("${catalogName}")) + } + + try_sql("DROP USER ${user}") + sql """drop catalog if exists ${catalogName}""" +} diff --git a/regression-test/suites/auth_p0/test_mtmv_auth.groovy b/regression-test/suites/auth_p0/test_mtmv_auth.groovy new file mode 100644 index 00000000000000..52ecbebb70b268 --- /dev/null +++ b/regression-test/suites/auth_p0/test_mtmv_auth.groovy @@ -0,0 +1,100 @@ +// Licensed to the Apache Software Foundation (ASF) 
under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_mtmv_auth","p0,auth") { + String suiteName = "test_mtmv_auth" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + String mvName = "${suiteName}_mv" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """DROP MATERIALIZED VIEW IF EXISTS ${mvName};""" + sql """drop table if exists `${tableName}`""" + sql """ + CREATE TABLE `${tableName}` ( + `user_id` LARGEINT NOT NULL COMMENT '\"用户id\"', + `date` DATE NOT NULL COMMENT '\"数据灌入日期时间\"', + `num` SMALLINT NOT NULL COMMENT '\"数量\"' + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`, `date`, `num`) + COMMENT 'OLAP' + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + select * from ${tableName}; + """ + + sql """refresh MATERIALIZED VIEW ${mvName} auto""" + waitingMTMVTaskFinishedByMvName(mvName) + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + 
assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def mvsRes = sql """select * from mv_infos("database"="${dbName}");""" + logger.info("mvsRes: " + mvsRes.toString()) + assertFalse(mvsRes.toString().contains("${mvName}")) + + def jobsRes = sql """select * from jobs("type"="mv");""" + logger.info("jobsRes: " + jobsRes.toString()) + assertFalse(jobsRes.toString().contains("${mvName}")) + + def tasksRes = sql """select * from tasks("type"="mv");""" + logger.info("tasksRes: " + tasksRes.toString()) + assertFalse(tasksRes.toString().contains("${mvName}")) + + } + + sql """grant select_priv on ${dbName}.${mvName} to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + def mvsRes = sql """select * from mv_infos("database"="${dbName}");""" + logger.info("mvsRes: " + mvsRes.toString()) + assertTrue(mvsRes.toString().contains("${mvName}")) + + def jobsRes = sql """select * from jobs("type"="mv");""" + logger.info("jobsRes: " + jobsRes.toString()) + assertTrue(jobsRes.toString().contains("${mvName}")) + + def tasksRes = sql """select * from tasks("type"="mv");""" + logger.info("tasksRes: " + tasksRes.toString()) + assertTrue(tasksRes.toString().contains("${mvName}")) + } + + try_sql("DROP USER ${user}") + sql """DROP MATERIALIZED VIEW IF EXISTS ${mvName};""" + sql """drop table if exists `${tableName}`""" +} diff --git a/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy b/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy new file mode 100644 index 00000000000000..3f0ae7ea8d524c --- /dev/null +++ b/regression-test/suites/auth_p0/test_partition_values_tvf_auth.groovy @@ -0,0 +1,69 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license 
agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_partition_values_tvf_auth","p0,auth") { + String suiteName = "test_partition_values_tvf_auth" + String enabled = context.config.otherConfigs.get("enableHiveTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disable Hive test.") + return; + } + + for (String hivePrefix : ["hive3"]) { + String extHiveHmsHost = context.config.otherConfigs.get("externalEnvIp") + String extHiveHmsPort = context.config.otherConfigs.get(hivePrefix + "HmsPort") + String catalog_name = "${hivePrefix}_test_external_catalog_hive_partition" + + sql """drop catalog if exists ${catalog_name};""" + sql """ + create catalog if not exists ${catalog_name} properties ( + 'type'='hms', + 'hive.metastore.uris' = 'thrift://${extHiveHmsHost}:${extHiveHmsPort}' + ); + """ + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + connect(user=user, password="${pwd}", 
url=context.config.jdbcUrl) { + test { + sql """ + select * from partition_values("catalog" = "${catalog_name}", "database" = "multi_catalog", "table" = "orc_partitioned_columns") order by t_int, t_float; + """ + exception "denied" + } + } + sql """grant select_priv on ${catalog_name}.multi_catalog.orc_partitioned_columns to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select * from partition_values("catalog" = "${catalog_name}", "database" = "multi_catalog", "table" = "orc_partitioned_columns") order by t_int, t_float; + """ + } + try_sql("DROP USER ${user}") + sql """drop catalog if exists ${catalog_name}""" + } +} + diff --git a/regression-test/suites/auth_p0/test_partitions_auth.groovy b/regression-test/suites/auth_p0/test_partitions_auth.groovy new file mode 100644 index 00000000000000..0b769f11567845 --- /dev/null +++ b/regression-test/suites/auth_p0/test_partitions_auth.groovy @@ -0,0 +1,84 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_partitions_auth","p0,auth") { + String suiteName = "test_partitions_auth" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + sql """drop table if exists `${tableName}`""" + sql """ + CREATE TABLE `${tableName}` ( + `user_id` LARGEINT NOT NULL COMMENT '\"用户id\"', + `date` DATE NOT NULL COMMENT '\"数据灌入日期时间\"', + `num` SMALLINT NOT NULL COMMENT '\"数量\"' + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`, `date`, `num`) + COMMENT 'OLAP' + PARTITION BY RANGE(`date`) + (PARTITION p201701_1000 VALUES [('0000-01-01'), ('2017-02-01')), + PARTITION p201702_2000 VALUES [('2017-02-01'), ('2017-03-01')), + PARTITION p201703_all VALUES [('2017-03-01'), ('2017-04-01'))) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show partitions from ${dbName}.${tableName}; + """ + exception "denied" + } + test { + sql """ + select * from partitions('catalog'='internal',"database"="${dbName}","table"="${tableName}"); + """ + exception "denied" + } + } + + sql """grant select_priv on ${dbName}.${tableName} to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show partitions from ${dbName}.${tableName}; + """ + sql """ + select * from partitions('catalog'='internal',"database"="${dbName}","table"="${tableName}"); + """ + } + + try_sql("DROP USER ${user}") + sql """drop table if exists 
`${tableName}`""" +} diff --git a/regression-test/suites/auth_p0/test_query_tvf_auth.groovy b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy new file mode 100644 index 00000000000000..05c274077d9eb3 --- /dev/null +++ b/regression-test/suites/auth_p0/test_query_tvf_auth.groovy @@ -0,0 +1,74 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_jdbc_query_tvf","p0,auth") { + String suiteName = "test_jdbc_query_tvf" + String enabled = context.config.otherConfigs.get("enableJdbcTest") + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + String s3_endpoint = getS3Endpoint() + String bucket = getS3BucketName() + String driver_url = "https://${bucket}.${s3_endpoint}/regression/jdbc_driver/mysql-connector-java-8.0.25.jar" + if (enabled != null && enabled.equalsIgnoreCase("true")) { + String user = "test_jdbc_user"; + String pwd = '123456'; + String catalog_name = "${suiteName}_catalog" + String mysql_port = context.config.otherConfigs.get("mysql_57_port"); + + sql """drop catalog if exists ${catalog_name} """ + + sql """create catalog if not exists ${catalog_name} properties( + "type"="jdbc", + "user"="root", + "password"="123456", + "jdbc_url" = "jdbc:mysql://${externalEnvIp}:${mysql_port}/doris_test", + "driver_url" = "${driver_url}", + "driver_class" = "com.mysql.cj.jdbc.Driver" + );""" + + String dorisuser = "${suiteName}_user" + String dorispwd = 'C123_567p' + try_sql("DROP USER ${dorisuser}") + sql """CREATE USER '${dorisuser}' IDENTIFIED BY '${dorispwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${dorisuser}"""; + } + + sql """grant select_priv on regression_test to ${dorisuser}""" + + connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) { + test { + sql """ + select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types'); + """ + exception "denied" + } + } + sql """grant select_priv on ${catalog_name}.*.* to ${dorisuser}""" + connect(user=dorisuser, password="${dorispwd}", url=context.config.jdbcUrl) { + sql """ + select * from query('catalog' = '${catalog_name}', 'query' = 'select * from doris_test.all_types'); + """ + } + try_sql("DROP USER 
${dorisuser}") + sql """drop catalog if exists ${catalog_name} """ + } +} + diff --git a/regression-test/suites/auth_p0/test_select_count_auth.groovy b/regression-test/suites/auth_p0/test_select_count_auth.groovy new file mode 100644 index 00000000000000..ccea1a4a580098 --- /dev/null +++ b/regression-test/suites/auth_p0/test_select_count_auth.groovy @@ -0,0 +1,93 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +import org.junit.Assert; + +suite("test_select_count_auth","p0,auth") { + String suiteName = "test_select_count_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select count(*) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(1) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + exception "denied" + } + } + + sql """grant select_priv(query_id) on __internal_schema.audit_log to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select count(*) from __internal_schema.audit_log; + """ + exception "denied" + } + test { + sql """ + select count(1) from __internal_schema.audit_log; + """ + exception "denied" + } + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + } + + sql """grant select_priv on __internal_schema.audit_log to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select count(*) from __internal_schema.audit_log; + """ + sql """ + select count(1) from __internal_schema.audit_log; + """ + sql """ + select count(query_id) from __internal_schema.audit_log; + """ + } + + try_sql("DROP USER ${user}") +} diff --git a/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy b/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy index 047b4a36fe2622..557eaf5b061d70 100644 --- 
a/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy +++ b/regression-test/suites/external_table_p2/tvf/test_iceberg_meta.groovy @@ -16,7 +16,7 @@ // under the License. suite("test_iceberg_meta", "p2,external,iceberg,external_remote,external_remote_iceberg") { - + String suiteName = "test_iceberg_meta" Boolean ignoreP2 = true; if (ignoreP2) { logger.info("disable p2 test"); @@ -54,5 +54,37 @@ suite("test_iceberg_meta", "p2,external,iceberg,external_remote,external_remote_ "query_type" = "snapshots") where snapshot_id = 7235593032487457798; """ + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select committed_at, snapshot_id, parent_id, operation from iceberg_meta( + "table" = "${iceberg_catalog_name}.${db}.multi_partition", + "query_type" = "snapshots"); + """ + exception "denied" + } + } + sql """grant select_priv on ${iceberg_catalog_name}.${db}.multi_partition to ${user}""" + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select committed_at, snapshot_id, parent_id, operation from iceberg_meta( + "table" = "${iceberg_catalog_name}.${db}.multi_partition", + "query_type" = "snapshots"); + """ + } + try_sql("DROP USER ${user}") } } From fa973507a215e48bcdae12437e66f8a661b4643d Mon Sep 17 00:00:00 2001 From: zhangdong Date: Mon, 23 Dec 2024 19:09:17 +0800 Subject: [PATCH 64/82] [enhance](mtmv)MTMV allow paimon table has multi partition keys (#45652) ### What problem does this PR solve? 
- MTMV allow paimon table has multi partition keys - add case --- .../paimon/run01.sql | 35 +++- .../info/MTMVPartitionDefinition.java | 6 +- .../data/mtmv_p0/test_paimon_mtmv.out | 29 ++++ .../mtmv_p0/test_paimon_olap_rewrite_mtmv.out | 79 +++++++++ .../suites/mtmv_p0/test_paimon_mtmv.groovy | 155 +++++++++++++++++- .../test_paimon_olap_rewrite_mtmv.groovy | 115 +++++++++++++ .../mtmv_p0/test_paimon_rewrite_mtmv.groovy | 3 + .../test_partition_refresh_mtmv.groovy | 25 ++- 8 files changed, 427 insertions(+), 20 deletions(-) create mode 100644 regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out create mode 100644 regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy diff --git a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql index 7aa4170eab0985..5cc0a0ea685e37 100644 --- a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql +++ b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql @@ -22,4 +22,37 @@ insert into test_tb_mix_format values (1,1,'b'),(2,1,'b'),(3,1,'b'),(4,1,'b'),(5 -- update some data, these splits will be readed by jni insert into test_tb_mix_format values (1,2,'b'),(2,2,'b'),(3,2,'b'),(4,2,'b'),(5,2,'b'); -- delete foramt in table properties, doris should get format by file name -alter table test_tb_mix_format unset TBLPROPERTIES ('file.format'); \ No newline at end of file +alter table test_tb_mix_format unset TBLPROPERTIES ('file.format'); + +drop table if exists two_partition; +CREATE TABLE two_partition ( + id BIGINT, + create_date STRING, + region STRING +) PARTITIONED BY (create_date,region) TBLPROPERTIES ( + 'primary-key' = 'create_date,region,id', + 'bucket'=10, + 'file.format'='orc' +); + +insert into two_partition values(1,'2020-01-01','bj'); +insert into two_partition 
values(2,'2020-01-01','sh'); +insert into two_partition values(3,'2038-01-01','bj'); +insert into two_partition values(4,'2038-01-01','sh'); +insert into two_partition values(5,'2038-01-02','bj'); + +drop table if exists null_partition; +CREATE TABLE null_partition ( + id BIGINT, + region STRING +) PARTITIONED BY (region) TBLPROPERTIES ( + 'primary-key' = 'region,id', + 'bucket'=10, + 'file.format'='orc' +); +-- null NULL "null" all will be in partition [null] +insert into null_partition values(1,'bj'); +insert into null_partition values(2,null); +insert into null_partition values(3,NULL); +insert into null_partition values(4,'null'); +insert into null_partition values(5,'NULL'); \ No newline at end of file diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java index c2e9abd2f0f97c..8624bed9ceefae 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/info/MTMVPartitionDefinition.java @@ -25,8 +25,8 @@ import org.apache.doris.analysis.FunctionParams; import org.apache.doris.analysis.SlotRef; import org.apache.doris.analysis.StringLiteral; +import org.apache.doris.catalog.PartitionType; import org.apache.doris.common.DdlException; -import org.apache.doris.datasource.hive.HMSExternalTable; import org.apache.doris.mtmv.MTMVPartitionExprFactory; import org.apache.doris.mtmv.MTMVPartitionInfo; import org.apache.doris.mtmv.MTMVPartitionInfo.MTMVPartitionType; @@ -136,9 +136,9 @@ private RelatedTableInfo getRelatedTableInfo(NereidsPlanner planner, String part if (!partitionColumnNames.contains(relatedTableInfo.getColumn())) { throw new AnalysisException("error related column: " + relatedTableInfo.getColumn()); } - if (!(mtmvBaseRelatedTable instanceof 
HMSExternalTable) + if (!(mtmvBaseRelatedTable.getPartitionType(Optional.empty()).equals(PartitionType.LIST)) && partitionColumnNames.size() != 1) { - throw new AnalysisException("only hms table support multi column partition."); + throw new AnalysisException("only List PartitionType support multi column partition."); } return relatedTableInfo; } diff --git a/regression-test/data/mtmv_p0/test_paimon_mtmv.out b/regression-test/data/mtmv_p0/test_paimon_mtmv.out index c28b7cb7baca22..ba6fc06c1d2491 100644 --- a/regression-test/data/mtmv_p0/test_paimon_mtmv.out +++ b/regression-test/data/mtmv_p0/test_paimon_mtmv.out @@ -111,3 +111,32 @@ false -- !not_partition_after -- true +-- !join_one_partition -- +1 2 a 1 2 +10 1 a \N \N +2 2 a \N \N +3 2 a \N \N +4 2 a \N \N +5 2 a \N \N +6 1 a \N \N +7 1 a \N \N +8 1 a \N \N +9 1 a \N \N + +-- !two_partition -- +1 2020-01-01 bj +2 2020-01-01 sh +3 2038-01-01 bj +4 2038-01-01 sh +5 2038-01-02 bj + +-- !limit_partition -- +3 2038-01-01 bj +4 2038-01-01 sh +5 2038-01-02 bj + +-- !null_partition -- +1 bj +4 null +5 NULL + diff --git a/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out b/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out new file mode 100644 index 00000000000000..09d23b7736e1d8 --- /dev/null +++ b/regression-test/data/mtmv_p0/test_paimon_olap_rewrite_mtmv.out @@ -0,0 +1,79 @@ +-- This file is automatically generated. 
You should know what you did if you want to edit this +-- !refresh_one_partition -- +1 2 a 1 2 +10 1 a \N \N +2 2 a \N \N +3 2 a \N \N +4 2 a \N \N +5 2 a \N \N +6 1 a \N \N +7 1 a \N \N +8 1 a \N \N +9 1 a \N \N + +-- !refresh_one_partition_rewrite -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + +-- !refresh_auto -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + +-- !refresh_all_partition_rewrite -- +1 2 a 1 2 +1 2 b 1 2 +10 1 a \N \N +10 1 b \N \N +2 2 a \N \N +2 2 b \N \N +3 2 a \N \N +3 2 b \N \N +4 2 a \N \N +4 2 b \N \N +5 2 a \N \N +5 2 b \N \N +6 1 a \N \N +6 1 b \N \N +7 1 a \N \N +7 1 b \N \N +8 1 a \N \N +8 1 b \N \N +9 1 a \N \N +9 1 b \N \N + diff --git a/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy index f2989edbf6dfd6..48d63e03ec3db5 100644 --- a/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_paimon_mtmv.groovy @@ -25,6 +25,24 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori String catalogName = "${suiteName}_catalog" String mvName = "${suiteName}_mv" String dbName = context.config.getDbNameByFile(context.file) + String otherDbName = "${suiteName}_otherdb" + String tableName = "${suiteName}_table" + + sql """drop database if exists ${otherDbName}""" + sql """create database ${otherDbName}""" + sql """ + CREATE TABLE ${otherDbName}.${tableName} ( + `user_id` INT, + `num` INT + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES 
('replication_num' = '1') ; + """ + + sql """ + insert into ${otherDbName}.${tableName} values(1,2); + """ String minio_port = context.config.otherConfigs.get("iceberg_minio_port") String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") @@ -99,8 +117,10 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori sql """ CREATE MATERIALIZED VIEW ${mvName} BUILD DEFERRED REFRESH AUTO ON MANUAL - DISTRIBUTED BY RANDOM BUCKETS 2 - PROPERTIES ('replication_num' = '1') + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") AS SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; """ @@ -113,6 +133,137 @@ suite("test_paimon_mtmv", "p0,external,mtmv,external_docker,external_docker_dori order_qt_not_partition "SELECT * FROM ${mvName} " order_qt_not_partition_after "select SyncWithBaseTables from mv_infos('database'='${dbName}') where Name='${mvName}'" sql """drop materialized view if exists ${mvName};""" + + // refresh on schedule + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD IMMEDIATE REFRESH COMPLETE ON SCHEDULE EVERY 10 SECOND STARTS "9999-12-13 21:07:09" + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; + """ + waitingMTMVTaskFinishedByMvName(mvName) + sql """drop materialized view if exists ${mvName};""" + + // refresh on schedule + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD IMMEDIATE REFRESH AUTO ON commit + KEY(`id`) + COMMENT "comment1" + DISTRIBUTED BY HASH(`id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1',"grace_period"="333") + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format; + """ + waitingMTMVTaskFinishedByMvName(mvName) + sql """drop materialized view if exists ${mvName};""" + + // cross db and join internal table + sql """ + 
CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join internal.${otherDbName}.${tableName} b on a.id=b.user_id; + """ + def showJoinPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showJoinPartitionsResult: " + showJoinPartitionsResult.toString()) + assertTrue(showJoinPartitionsResult.toString().contains("p_a")) + assertTrue(showJoinPartitionsResult.toString().contains("p_b")) + + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_a); + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_join_one_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`create_date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.two_partition; + """ + def showTwoPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showTwoPartitionsResult: " + showTwoPartitionsResult.toString()) + assertTrue(showTwoPartitionsResult.toString().contains("p_20200101")) + assertTrue(showTwoPartitionsResult.toString().contains("p_20380101")) + assertTrue(showTwoPartitionsResult.toString().contains("p_20380102")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_two_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`create_date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1','partition_sync_limit'='2','partition_date_format'='%Y-%m-%d', + 'partition_sync_time_unit'='MONTH') + AS + SELECT * FROM 
${catalogName}.`test_paimon_spark`.two_partition; + """ + def showLimitPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showLimitPartitionsResult: " + showLimitPartitionsResult.toString()) + assertFalse(showLimitPartitionsResult.toString().contains("p_20200101")) + assertTrue(showLimitPartitionsResult.toString().contains("p_20380101")) + assertTrue(showLimitPartitionsResult.toString().contains("p_20380102")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_limit_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + + // not allow date trunc + test { + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by (date_trunc(`create_date`,'month')) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1','partition_sync_limit'='2','partition_date_format'='%Y-%m-%d', + 'partition_sync_time_unit'='MONTH') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.two_partition; + """ + exception "only support" + } + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`region`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${catalogName}.`test_paimon_spark`.null_partition; + """ + def showNullPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showNullPartitionsResult: " + showNullPartitionsResult.toString()) + assertTrue(showNullPartitionsResult.toString().contains("p_null")) + assertTrue(showNullPartitionsResult.toString().contains("p_NULL")) + assertTrue(showNullPartitionsResult.toString().contains("p_bj")) + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto; + """ + waitingMTMVTaskFinishedByMvName(mvName) + // Will lose null data + order_qt_null_partition "SELECT * FROM ${mvName} " + sql """drop materialized view if exists ${mvName};""" + sql """drop catalog if exists 
${catalogName}""" } diff --git a/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy new file mode 100644 index 00000000000000..a3ac1c048d30da --- /dev/null +++ b/regression-test/suites/mtmv_p0/test_paimon_olap_rewrite_mtmv.groovy @@ -0,0 +1,115 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_paimon_olap_rewrite_mtmv", "p0,external,mtmv,external_docker,external_docker_doris") { + String enabled = context.config.otherConfigs.get("enablePaimonTest") + if (enabled == null || !enabled.equalsIgnoreCase("true")) { + logger.info("disabled paimon test") + return + } + String suiteName = "test_paimon_olap_rewrite_mtmv" + String catalogName = "${suiteName}_catalog" + String mvName = "${suiteName}_mv" + String dbName = context.config.getDbNameByFile(context.file) + String tableName = "${suiteName}_table" + sql """drop table if exists ${tableName}""" + sql """ + CREATE TABLE ${tableName} ( + `user_id` INT, + `num` INT + ) ENGINE=OLAP + DUPLICATE KEY(`user_id`) + DISTRIBUTED BY HASH(`user_id`) BUCKETS 2 + PROPERTIES ('replication_num' = '1') ; + """ + sql """ + insert into ${tableName} values(1,2); + """ + + sql """analyze table internal.`${dbName}`. ${tableName} with sync""" + sql """alter table internal.`${dbName}`. ${tableName} modify column user_id set stats ('row_count'='1');""" + + String minio_port = context.config.otherConfigs.get("iceberg_minio_port") + String externalEnvIp = context.config.otherConfigs.get("externalEnvIp") + + sql """set materialized_view_rewrite_enable_contain_external_table=true;""" + String mvSql = "SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join ${tableName} b on a.id=b.user_id;"; + + sql """drop catalog if exists ${catalogName}""" + sql """CREATE CATALOG ${catalogName} PROPERTIES ( + 'type'='paimon', + 'warehouse' = 's3://warehouse/wh/', + "s3.access_key" = "admin", + "s3.secret_key" = "password", + "s3.endpoint" = "http://${externalEnvIp}:${minio_port}", + "s3.region" = "us-east-1" + );""" + + sql """analyze table ${catalogName}.`test_paimon_spark`.test_tb_mix_format with sync""" + sql """alter table ${catalogName}.`test_paimon_spark`.test_tb_mix_format modify column par set stats ('row_count'='20');""" + + sql """drop materialized view if exists ${mvName};""" + + sql """ + CREATE 
MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + ${mvSql} + """ + def showPartitionsResult = sql """show partitions from ${mvName}""" + logger.info("showPartitionsResult: " + showPartitionsResult.toString()) + assertTrue(showPartitionsResult.toString().contains("p_a")) + assertTrue(showPartitionsResult.toString().contains("p_b")) + + // refresh one partitions + sql """ + REFRESH MATERIALIZED VIEW ${mvName} partitions(p_a); + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_one_partition "SELECT * FROM ${mvName} " + + def explainOnePartition = sql """ explain ${mvSql} """ + logger.info("explainOnePartition: " + explainOnePartition.toString()) + assertTrue(explainOnePartition.toString().contains("VUNION")) + order_qt_refresh_one_partition_rewrite "${mvSql}" + + mv_rewrite_success("${mvSql}", "${mvName}") + + // select p_b should not rewrite + mv_rewrite_fail("SELECT * FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format a left join ${tableName} b on a.id=b.user_id where a.par='b';", "${mvName}") + + //refresh auto + sql """ + REFRESH MATERIALIZED VIEW ${mvName} auto + """ + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_auto "SELECT * FROM ${mvName} " + + def explainAllPartition = sql """ explain ${mvSql}; """ + logger.info("explainAllPartition: " + explainAllPartition.toString()) + assertTrue(explainAllPartition.toString().contains("VOlapScanNode")) + order_qt_refresh_all_partition_rewrite "${mvSql}" + + mv_rewrite_success("${mvSql}", "${mvName}") + + sql """drop materialized view if exists ${mvName};""" + sql """drop catalog if exists ${catalogName}""" +} + diff --git a/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy b/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy index 985443875c7b26..22a94d46635169 100644 --- a/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy +++ 
b/regression-test/suites/mtmv_p0/test_paimon_rewrite_mtmv.groovy @@ -75,6 +75,9 @@ suite("test_paimon_rewrite_mtmv", "p0,external,mtmv,external_docker,external_doc mv_rewrite_success("${mvSql}", "${mvName}") + // select p_b should not rewrite + mv_rewrite_fail("SELECT par,count(*) as num FROM ${catalogName}.`test_paimon_spark`.test_tb_mix_format where par='b' group by par;", "${mvName}") + //refresh auto sql """ REFRESH MATERIALIZED VIEW ${mvName} auto diff --git a/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy b/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy index 8e084091f4d15a..21296fc5878874 100644 --- a/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_partition_refresh_mtmv.groovy @@ -113,20 +113,17 @@ suite("test_partition_refresh_mtmv") { PROPERTIES ('replication_num' = '1') ; """ - try { - sql """ - CREATE MATERIALIZED VIEW ${mvName} - BUILD DEFERRED REFRESH AUTO ON MANUAL - partition by(`date`) - DISTRIBUTED BY RANDOM BUCKETS 2 - PROPERTIES ('replication_num' = '1') - AS - SELECT * FROM ${tableNameNum}; - """ - Assert.fail(); - } catch (Exception e) { - log.info(e.getMessage()) - } + + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`date`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS + SELECT * FROM ${tableNameNum}; + """ + sql """drop table if exists `${tableNameNum}`""" sql """drop materialized view if exists ${mvName};""" From 2bfab8fd2297ed3d79feef83015a147f76c4f49b Mon Sep 17 00:00:00 2001 From: zhangdong Date: Mon, 23 Dec 2024 19:20:31 +0800 Subject: [PATCH 65/82] [fix](auth)fix some tvf not check auth (#45483) ### What problem does this PR solve? 
Problem Summary: fix some tvf not check auth - backends() support check auth - frontends() support check auth - frontends_disks() support check auth ### Release note Change the privilege required for TVF backends, frontends and frontends_disks. Before, no privilege required for them. from now on, ADMIN_PRIV or NODE_PRIV is required. --- .../BackendsTableValuedFunction.java | 10 +++ .../FrontendsDisksTableValuedFunction.java | 10 +++ .../FrontendsTableValuedFunction.java | 10 +++ .../suites/auth_p0/test_backends_auth.groovy | 64 +++++++++++++++++++ .../suites/auth_p0/test_frontends_auth.groovy | 64 +++++++++++++++++++ .../auth_p0/test_frontends_disks_auth.groovy | 55 ++++++++++++++++ 6 files changed, 213 insertions(+) create mode 100644 regression-test/suites/auth_p0/test_backends_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_frontends_auth.groovy create mode 100644 regression-test/suites/auth_p0/test_frontends_disks_auth.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java index 04ea7d01eae3dd..817bfefafdfbc3 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/BackendsTableValuedFunction.java @@ -18,9 +18,13 @@ package org.apache.doris.tablefunction; import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.PrimitiveType; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TBackendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -83,6 +87,12 @@ public 
BackendsTableValuedFunction(Map params) throws AnalysisEx if (params.size() != 0) { throw new AnalysisException("backends table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java index cc7ff82b8fb0e1..2c898a57afe96d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsDisksTableValuedFunction.java @@ -18,8 +18,12 @@ package org.apache.doris.tablefunction; import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TFrontendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -67,6 +71,12 @@ public FrontendsDisksTableValuedFunction(Map params) throws Anal if (params.size() != 0) { throw new AnalysisException("frontends_disks table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git 
a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java index aded1076a83d03..a9f48b6d1ff3d1 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java +++ b/fe/fe-core/src/main/java/org/apache/doris/tablefunction/FrontendsTableValuedFunction.java @@ -18,8 +18,12 @@ package org.apache.doris.tablefunction; import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.Env; import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.mysql.privilege.PrivPredicate; import org.apache.doris.nereids.exceptions.AnalysisException; +import org.apache.doris.qe.ConnectContext; import org.apache.doris.thrift.TFrontendsMetadataParams; import org.apache.doris.thrift.TMetaScanRange; import org.apache.doris.thrift.TMetadataType; @@ -76,6 +80,12 @@ public FrontendsTableValuedFunction(Map params) throws AnalysisE if (params.size() != 0) { throw new AnalysisException("frontends table-valued-function does not support any params"); } + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN_OR_NODE)) { + String message = ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR.formatErrorMsg( + PrivPredicate.ADMIN_OR_NODE.getPrivs().toString()); + throw new AnalysisException(message); + } } @Override diff --git a/regression-test/suites/auth_p0/test_backends_auth.groovy b/regression-test/suites/auth_p0/test_backends_auth.groovy new file mode 100644 index 00000000000000..753ae837c776e9 --- /dev/null +++ b/regression-test/suites/auth_p0/test_backends_auth.groovy @@ -0,0 +1,64 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_backends_auth","p0,auth") { + String suiteName = "test_backends_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show backends; + """ + exception "denied" + } + test { + sql """ + select * from backends(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show backends; + """ + sql """ + select * from backends(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file diff --git a/regression-test/suites/auth_p0/test_frontends_auth.groovy b/regression-test/suites/auth_p0/test_frontends_auth.groovy new file mode 100644 index 00000000000000..21fff527518e2b --- /dev/null +++ b/regression-test/suites/auth_p0/test_frontends_auth.groovy @@ -0,0 +1,64 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor 
license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_frontends_auth","p0,auth") { + String suiteName = "test_frontends_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + show frontends; + """ + exception "denied" + } + test { + sql """ + select * from frontends(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + show frontends; + """ + sql """ + select * from frontends(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file diff --git a/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy b/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy new file mode 100644 index 00000000000000..3767fdde0a5e92 --- /dev/null +++ 
b/regression-test/suites/auth_p0/test_frontends_disks_auth.groovy @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +import org.junit.Assert; + +suite("test_frontends_disks_auth","p0,auth") { + String suiteName = "test_frontends_disks_auth" + String user = "${suiteName}_user" + String pwd = 'C123_567p' + try_sql("DROP USER ${user}") + sql """CREATE USER '${user}' IDENTIFIED BY '${pwd}'""" + + //cloud-mode + if (isCloudMode()) { + def clusters = sql " SHOW CLUSTERS; " + assertTrue(!clusters.isEmpty()) + def validCluster = clusters[0][0] + sql """GRANT USAGE_PRIV ON CLUSTER ${validCluster} TO ${user}"""; + } + + sql """grant select_priv on regression_test to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + test { + sql """ + select * from frontends_disks(); + """ + exception "denied" + } + } + + sql """grant admin_priv on *.*.* to ${user}""" + + connect(user=user, password="${pwd}", url=context.config.jdbcUrl) { + sql """ + select * from frontends_disks(); + """ + } + + try_sql("DROP USER ${user}") +} \ No newline at end of file From b22045c23487f6b0aa52c9e33ca0883d0c46f05a Mon Sep 17 00:00:00 2001 From: minghong Date: Mon, 23 Dec 2024 20:29:03 +0800 Subject: [PATCH 
66/82] [opt](regression) reorganize shape-check cases (#45766) ### What problem does this PR solve? remove duplicated shape check cases mv all shape check cases into one directory --- .../data/nereids_hint_tpch_p0/shape/q15.out | 35 - .../data/new_shapes_p0/clickbench/query1.out | 9 - .../data/new_shapes_p0/clickbench/query10.out | 12 - .../data/new_shapes_p0/clickbench/query11.out | 13 - .../data/new_shapes_p0/clickbench/query12.out | 13 - .../data/new_shapes_p0/clickbench/query13.out | 13 - .../data/new_shapes_p0/clickbench/query14.out | 13 - .../data/new_shapes_p0/clickbench/query15.out | 13 - .../data/new_shapes_p0/clickbench/query16.out | 10 - .../data/new_shapes_p0/clickbench/query17.out | 10 - .../data/new_shapes_p0/clickbench/query18.out | 10 - .../data/new_shapes_p0/clickbench/query19.out | 10 - .../data/new_shapes_p0/clickbench/query2.out | 10 - .../data/new_shapes_p0/clickbench/query20.out | 8 - .../data/new_shapes_p0/clickbench/query21.out | 10 - .../data/new_shapes_p0/clickbench/query22.out | 13 - .../data/new_shapes_p0/clickbench/query23.out | 13 - .../data/new_shapes_p0/clickbench/query24.out | 9 - .../data/new_shapes_p0/clickbench/query25.out | 11 - .../data/new_shapes_p0/clickbench/query26.out | 10 - .../data/new_shapes_p0/clickbench/query27.out | 11 - .../data/new_shapes_p0/clickbench/query28.out | 14 - .../data/new_shapes_p0/clickbench/query29.out | 14 - .../data/new_shapes_p0/clickbench/query3.out | 9 - .../data/new_shapes_p0/clickbench/query30.out | 10 - .../data/new_shapes_p0/clickbench/query31.out | 13 - .../data/new_shapes_p0/clickbench/query32.out | 13 - .../data/new_shapes_p0/clickbench/query33.out | 12 - .../data/new_shapes_p0/clickbench/query34.out | 12 - .../data/new_shapes_p0/clickbench/query35.out | 13 - .../data/new_shapes_p0/clickbench/query36.out | 13 - .../data/new_shapes_p0/clickbench/query37.out | 13 - .../data/new_shapes_p0/clickbench/query38.out | 13 - .../data/new_shapes_p0/clickbench/query39.out | 13 - 
.../data/new_shapes_p0/clickbench/query4.out | 9 - .../data/new_shapes_p0/clickbench/query40.out | 14 - .../data/new_shapes_p0/clickbench/query41.out | 13 - .../data/new_shapes_p0/clickbench/query42.out | 13 - .../data/new_shapes_p0/clickbench/query43.out | 13 - .../data/new_shapes_p0/clickbench/query5.out | 11 - .../data/new_shapes_p0/clickbench/query6.out | 12 - .../data/new_shapes_p0/clickbench/query7.out | 9 - .../data/new_shapes_p0/clickbench/query8.out | 13 - .../data/new_shapes_p0/clickbench/query9.out | 12 - .../new_shapes_p0/hint_tpcds/shape/query1.out | 42 - .../hint_tpcds/shape/query24.out | 56 - .../hint_tpcds/shape/query64.out | 106 - .../hint_tpcds/shape/query67.out | 37 - .../hint_tpcds/shape/query72.out | 59 - .../hint_tpcds/shape/query78.out | 62 - .../new_shapes_p0/hint_tpch/shape/q10.out | 31 - .../new_shapes_p0/hint_tpch/shape/q11.out | 42 - .../new_shapes_p0/hint_tpch/shape/q12.out | 22 - .../new_shapes_p0/hint_tpch/shape/q13.out | 24 - .../new_shapes_p0/hint_tpch/shape/q14.out | 20 - .../new_shapes_p0/hint_tpch/shape/q15.out | 35 - .../new_shapes_p0/hint_tpch/shape/q17.out | 25 - .../new_shapes_p0/hint_tpch/shape/q19.out | 20 - .../data/new_shapes_p0/hint_tpch/shape/q3.out | 26 - .../data/new_shapes_p0/hint_tpch/shape/q4.out | 23 - .../data/new_shapes_p0/hint_tpch/shape/q5.out | 39 - .../data/new_shapes_p0/hint_tpch/shape/q7.out | 40 - .../data/new_shapes_p0/hint_tpch/shape/q8.out | 49 - .../data/new_shapes_p0/hint_tpch/shape/q9.out | 38 - .../new_shapes_p0/ssb_sf100/shape/flat.out | 15 - .../new_shapes_p0/ssb_sf100/shape/q1.1.out | 15 - .../new_shapes_p0/ssb_sf100/shape/q1.2.out | 15 - .../new_shapes_p0/ssb_sf100/shape/q1.3.out | 15 - .../new_shapes_p0/ssb_sf100/shape/q2.1.out | 26 - .../new_shapes_p0/ssb_sf100/shape/q2.2.out | 26 - .../new_shapes_p0/ssb_sf100/shape/q2.3.out | 26 - .../new_shapes_p0/ssb_sf100/shape/q3.1.out | 27 - .../new_shapes_p0/ssb_sf100/shape/q3.2.out | 27 - .../new_shapes_p0/ssb_sf100/shape/q3.3.out | 27 - 
.../new_shapes_p0/ssb_sf100/shape/q3.4.out | 27 - .../new_shapes_p0/ssb_sf100/shape/q4.1.out | 31 - .../new_shapes_p0/ssb_sf100/shape/q4.2.out | 32 - .../new_shapes_p0/ssb_sf100/shape/q4.3.out | 31 - .../tpcds_sf100/constraints/query23.out | 81 - .../tpcds_sf100/noStatsRfPrune/query1.out | 37 - .../tpcds_sf100/noStatsRfPrune/query10.out | 47 - .../tpcds_sf100/noStatsRfPrune/query11.out | 54 - .../tpcds_sf100/noStatsRfPrune/query12.out | 26 - .../tpcds_sf100/noStatsRfPrune/query13.out | 34 - .../tpcds_sf100/noStatsRfPrune/query14.out | 154 - .../tpcds_sf100/noStatsRfPrune/query15.out | 25 - .../tpcds_sf100/noStatsRfPrune/query16.out | 34 - .../tpcds_sf100/noStatsRfPrune/query17.out | 44 - .../tpcds_sf100/noStatsRfPrune/query18.out | 42 - .../tpcds_sf100/noStatsRfPrune/query19.out | 35 - .../tpcds_sf100/noStatsRfPrune/query2.out | 39 - .../tpcds_sf100/noStatsRfPrune/query20.out | 26 - .../tpcds_sf100/noStatsRfPrune/query21.out | 26 - .../tpcds_sf100/noStatsRfPrune/query22.out | 23 - .../tpcds_sf100/noStatsRfPrune/query23.out | 81 - .../tpcds_sf100/noStatsRfPrune/query24.out | 52 - .../tpcds_sf100/noStatsRfPrune/query25.out | 43 - .../tpcds_sf100/noStatsRfPrune/query26.out | 31 - .../tpcds_sf100/noStatsRfPrune/query27.out | 33 - .../tpcds_sf100/noStatsRfPrune/query28.out | 57 - .../tpcds_sf100/noStatsRfPrune/query29.out | 43 - .../tpcds_sf100/noStatsRfPrune/query3.out | 23 - .../tpcds_sf100/noStatsRfPrune/query30.out | 41 - .../tpcds_sf100/noStatsRfPrune/query31.out | 65 - .../tpcds_sf100/noStatsRfPrune/query32.out | 26 - .../tpcds_sf100/noStatsRfPrune/query33.out | 83 - .../tpcds_sf100/noStatsRfPrune/query34.out | 32 - .../tpcds_sf100/noStatsRfPrune/query35.out | 47 - .../tpcds_sf100/noStatsRfPrune/query36.out | 33 - .../tpcds_sf100/noStatsRfPrune/query37.out | 27 - .../tpcds_sf100/noStatsRfPrune/query38.out | 50 - .../tpcds_sf100/noStatsRfPrune/query39.out | 33 - .../tpcds_sf100/noStatsRfPrune/query4.out | 75 - .../tpcds_sf100/noStatsRfPrune/query40.out | 30 - 
.../tpcds_sf100/noStatsRfPrune/query41.out | 23 - .../tpcds_sf100/noStatsRfPrune/query42.out | 22 - .../tpcds_sf100/noStatsRfPrune/query43.out | 22 - .../tpcds_sf100/noStatsRfPrune/query44.out | 69 - .../tpcds_sf100/noStatsRfPrune/query45.out | 35 - .../tpcds_sf100/noStatsRfPrune/query46.out | 38 - .../tpcds_sf100/noStatsRfPrune/query47.out | 45 - .../tpcds_sf100/noStatsRfPrune/query48.out | 29 - .../tpcds_sf100/noStatsRfPrune/query49.out | 107 - .../tpcds_sf100/noStatsRfPrune/query5.out | 77 - .../tpcds_sf100/noStatsRfPrune/query50.out | 29 - .../tpcds_sf100/noStatsRfPrune/query51.out | 43 - .../tpcds_sf100/noStatsRfPrune/query52.out | 23 - .../tpcds_sf100/noStatsRfPrune/query53.out | 32 - .../tpcds_sf100/noStatsRfPrune/query54.out | 72 - .../tpcds_sf100/noStatsRfPrune/query55.out | 23 - .../tpcds_sf100/noStatsRfPrune/query56.out | 83 - .../tpcds_sf100/noStatsRfPrune/query57.out | 45 - .../tpcds_sf100/noStatsRfPrune/query58.out | 86 - .../tpcds_sf100/noStatsRfPrune/query59.out | 42 - .../tpcds_sf100/noStatsRfPrune/query6.out | 47 - .../tpcds_sf100/noStatsRfPrune/query60.out | 83 - .../tpcds_sf100/noStatsRfPrune/query61.out | 70 - .../tpcds_sf100/noStatsRfPrune/query62.out | 29 - .../tpcds_sf100/noStatsRfPrune/query63.out | 32 - .../tpcds_sf100/noStatsRfPrune/query64.out | 100 - .../tpcds_sf100/noStatsRfPrune/query65.out | 41 - .../tpcds_sf100/noStatsRfPrune/query66.out | 62 - .../tpcds_sf100/noStatsRfPrune/query67.out | 32 - .../tpcds_sf100/noStatsRfPrune/query68.out | 38 - .../tpcds_sf100/noStatsRfPrune/query69.out | 47 - .../tpcds_sf100/noStatsRfPrune/query7.out | 31 - .../tpcds_sf100/noStatsRfPrune/query70.out | 44 - .../tpcds_sf100/noStatsRfPrune/query71.out | 37 - .../tpcds_sf100/noStatsRfPrune/query72.out | 54 - .../tpcds_sf100/noStatsRfPrune/query73.out | 32 - .../tpcds_sf100/noStatsRfPrune/query74.out | 54 - .../tpcds_sf100/noStatsRfPrune/query75.out | 78 - .../tpcds_sf100/noStatsRfPrune/query76.out | 40 - .../tpcds_sf100/noStatsRfPrune/query77.out | 101 - 
.../tpcds_sf100/noStatsRfPrune/query78.out | 57 - .../tpcds_sf100/noStatsRfPrune/query79.out | 32 - .../tpcds_sf100/noStatsRfPrune/query8.out | 43 - .../tpcds_sf100/noStatsRfPrune/query80.out | 100 - .../tpcds_sf100/noStatsRfPrune/query81.out | 41 - .../tpcds_sf100/noStatsRfPrune/query82.out | 27 - .../tpcds_sf100/noStatsRfPrune/query83.out | 80 - .../tpcds_sf100/noStatsRfPrune/query84.out | 31 - .../tpcds_sf100/noStatsRfPrune/query85.out | 46 - .../tpcds_sf100/noStatsRfPrune/query86.out | 28 - .../tpcds_sf100/noStatsRfPrune/query87.out | 48 - .../tpcds_sf100/noStatsRfPrune/query88.out | 171 - .../tpcds_sf100/noStatsRfPrune/query89.out | 33 - .../tpcds_sf100/noStatsRfPrune/query9.out | 115 - .../tpcds_sf100/noStatsRfPrune/query90.out | 47 - .../tpcds_sf100/noStatsRfPrune/query91.out | 41 - .../tpcds_sf100/noStatsRfPrune/query92.out | 25 - .../tpcds_sf100/noStatsRfPrune/query93.out | 21 - .../tpcds_sf100/noStatsRfPrune/query94.out | 34 - .../tpcds_sf100/noStatsRfPrune/query95.out | 43 - .../tpcds_sf100/noStatsRfPrune/query96.out | 25 - .../tpcds_sf100/noStatsRfPrune/query97.out | 33 - .../tpcds_sf100/noStatsRfPrune/query98.out | 26 - .../tpcds_sf100/noStatsRfPrune/query99.out | 29 - .../tpcds_sf100/no_stats_shape/query1.out | 37 - .../tpcds_sf100/no_stats_shape/query10.out | 47 - .../tpcds_sf100/no_stats_shape/query11.out | 54 - .../tpcds_sf100/no_stats_shape/query12.out | 26 - .../tpcds_sf100/no_stats_shape/query13.out | 34 - .../tpcds_sf100/no_stats_shape/query14.out | 154 - .../tpcds_sf100/no_stats_shape/query15.out | 25 - .../tpcds_sf100/no_stats_shape/query16.out | 34 - .../tpcds_sf100/no_stats_shape/query17.out | 44 - .../tpcds_sf100/no_stats_shape/query18.out | 42 - .../tpcds_sf100/no_stats_shape/query19.out | 35 - .../tpcds_sf100/no_stats_shape/query2.out | 39 - .../tpcds_sf100/no_stats_shape/query20.out | 26 - .../tpcds_sf100/no_stats_shape/query21.out | 26 - .../tpcds_sf100/no_stats_shape/query22.out | 23 - .../tpcds_sf100/no_stats_shape/query23.out | 81 - 
.../tpcds_sf100/no_stats_shape/query24.out | 52 - .../tpcds_sf100/no_stats_shape/query25.out | 43 - .../tpcds_sf100/no_stats_shape/query26.out | 31 - .../tpcds_sf100/no_stats_shape/query27.out | 33 - .../tpcds_sf100/no_stats_shape/query28.out | 57 - .../tpcds_sf100/no_stats_shape/query29.out | 43 - .../tpcds_sf100/no_stats_shape/query3.out | 23 - .../tpcds_sf100/no_stats_shape/query30.out | 41 - .../tpcds_sf100/no_stats_shape/query31.out | 65 - .../tpcds_sf100/no_stats_shape/query32.out | 26 - .../tpcds_sf100/no_stats_shape/query33.out | 83 - .../tpcds_sf100/no_stats_shape/query34.out | 32 - .../tpcds_sf100/no_stats_shape/query35.out | 47 - .../tpcds_sf100/no_stats_shape/query36.out | 33 - .../tpcds_sf100/no_stats_shape/query37.out | 27 - .../tpcds_sf100/no_stats_shape/query38.out | 50 - .../tpcds_sf100/no_stats_shape/query39.out | 33 - .../tpcds_sf100/no_stats_shape/query4.out | 75 - .../tpcds_sf100/no_stats_shape/query40.out | 30 - .../tpcds_sf100/no_stats_shape/query41.out | 23 - .../tpcds_sf100/no_stats_shape/query42.out | 22 - .../tpcds_sf100/no_stats_shape/query43.out | 22 - .../tpcds_sf100/no_stats_shape/query44.out | 69 - .../tpcds_sf100/no_stats_shape/query45.out | 35 - .../tpcds_sf100/no_stats_shape/query46.out | 38 - .../tpcds_sf100/no_stats_shape/query47.out | 45 - .../tpcds_sf100/no_stats_shape/query48.out | 29 - .../tpcds_sf100/no_stats_shape/query49.out | 107 - .../tpcds_sf100/no_stats_shape/query5.out | 77 - .../tpcds_sf100/no_stats_shape/query50.out | 29 - .../tpcds_sf100/no_stats_shape/query51.out | 43 - .../tpcds_sf100/no_stats_shape/query52.out | 23 - .../tpcds_sf100/no_stats_shape/query53.out | 32 - .../tpcds_sf100/no_stats_shape/query54.out | 72 - .../tpcds_sf100/no_stats_shape/query55.out | 23 - .../tpcds_sf100/no_stats_shape/query56.out | 83 - .../tpcds_sf100/no_stats_shape/query57.out | 45 - .../tpcds_sf100/no_stats_shape/query58.out | 86 - .../tpcds_sf100/no_stats_shape/query59.out | 42 - .../tpcds_sf100/no_stats_shape/query6.out | 47 - 
.../tpcds_sf100/no_stats_shape/query60.out | 83 - .../tpcds_sf100/no_stats_shape/query61.out | 70 - .../tpcds_sf100/no_stats_shape/query62.out | 29 - .../tpcds_sf100/no_stats_shape/query63.out | 32 - .../tpcds_sf100/no_stats_shape/query64.out | 100 - .../tpcds_sf100/no_stats_shape/query65.out | 41 - .../tpcds_sf100/no_stats_shape/query66.out | 62 - .../tpcds_sf100/no_stats_shape/query67.out | 32 - .../tpcds_sf100/no_stats_shape/query68.out | 38 - .../tpcds_sf100/no_stats_shape/query69.out | 47 - .../tpcds_sf100/no_stats_shape/query7.out | 31 - .../tpcds_sf100/no_stats_shape/query70.out | 44 - .../tpcds_sf100/no_stats_shape/query71.out | 37 - .../tpcds_sf100/no_stats_shape/query72.out | 54 - .../tpcds_sf100/no_stats_shape/query73.out | 32 - .../tpcds_sf100/no_stats_shape/query74.out | 54 - .../tpcds_sf100/no_stats_shape/query75.out | 78 - .../tpcds_sf100/no_stats_shape/query76.out | 40 - .../tpcds_sf100/no_stats_shape/query77.out | 101 - .../tpcds_sf100/no_stats_shape/query78.out | 57 - .../tpcds_sf100/no_stats_shape/query79.out | 32 - .../tpcds_sf100/no_stats_shape/query8.out | 43 - .../tpcds_sf100/no_stats_shape/query80.out | 100 - .../tpcds_sf100/no_stats_shape/query81.out | 41 - .../tpcds_sf100/no_stats_shape/query82.out | 27 - .../tpcds_sf100/no_stats_shape/query83.out | 80 - .../tpcds_sf100/no_stats_shape/query84.out | 31 - .../tpcds_sf100/no_stats_shape/query85.out | 46 - .../tpcds_sf100/no_stats_shape/query86.out | 28 - .../tpcds_sf100/no_stats_shape/query87.out | 48 - .../tpcds_sf100/no_stats_shape/query88.out | 171 - .../tpcds_sf100/no_stats_shape/query89.out | 33 - .../tpcds_sf100/no_stats_shape/query9.out | 115 - .../tpcds_sf100/no_stats_shape/query90.out | 47 - .../tpcds_sf100/no_stats_shape/query91.out | 41 - .../tpcds_sf100/no_stats_shape/query92.out | 25 - .../tpcds_sf100/no_stats_shape/query93.out | 21 - .../tpcds_sf100/no_stats_shape/query94.out | 34 - .../tpcds_sf100/no_stats_shape/query95.out | 43 - .../tpcds_sf100/no_stats_shape/query96.out | 25 
- .../tpcds_sf100/no_stats_shape/query97.out | 33 - .../tpcds_sf100/no_stats_shape/query98.out | 26 - .../tpcds_sf100/no_stats_shape/query99.out | 29 - .../tpcds_sf100/rf_prune/query1.out | 37 - .../tpcds_sf100/rf_prune/query10.out | 47 - .../tpcds_sf100/rf_prune/query11.out | 54 - .../tpcds_sf100/rf_prune/query12.out | 26 - .../tpcds_sf100/rf_prune/query13.out | 34 - .../tpcds_sf100/rf_prune/query14.out | 154 - .../tpcds_sf100/rf_prune/query15.out | 25 - .../tpcds_sf100/rf_prune/query16.out | 34 - .../tpcds_sf100/rf_prune/query17.out | 44 - .../tpcds_sf100/rf_prune/query18.out | 42 - .../tpcds_sf100/rf_prune/query19.out | 35 - .../tpcds_sf100/rf_prune/query2.out | 39 - .../tpcds_sf100/rf_prune/query20.out | 26 - .../tpcds_sf100/rf_prune/query21.out | 26 - .../tpcds_sf100/rf_prune/query22.out | 23 - .../tpcds_sf100/rf_prune/query23.out | 81 - .../tpcds_sf100/rf_prune/query24.out | 52 - .../tpcds_sf100/rf_prune/query25.out | 43 - .../tpcds_sf100/rf_prune/query26.out | 31 - .../tpcds_sf100/rf_prune/query27.out | 33 - .../tpcds_sf100/rf_prune/query28.out | 57 - .../tpcds_sf100/rf_prune/query29.out | 43 - .../tpcds_sf100/rf_prune/query3.out | 23 - .../tpcds_sf100/rf_prune/query30.out | 41 - .../tpcds_sf100/rf_prune/query31.out | 65 - .../tpcds_sf100/rf_prune/query32.out | 26 - .../tpcds_sf100/rf_prune/query33.out | 83 - .../tpcds_sf100/rf_prune/query34.out | 32 - .../tpcds_sf100/rf_prune/query35.out | 47 - .../tpcds_sf100/rf_prune/query36.out | 33 - .../tpcds_sf100/rf_prune/query37.out | 27 - .../tpcds_sf100/rf_prune/query38.out | 50 - .../tpcds_sf100/rf_prune/query39.out | 33 - .../tpcds_sf100/rf_prune/query4.out | 75 - .../tpcds_sf100/rf_prune/query40.out | 30 - .../tpcds_sf100/rf_prune/query41.out | 23 - .../tpcds_sf100/rf_prune/query42.out | 22 - .../tpcds_sf100/rf_prune/query43.out | 22 - .../tpcds_sf100/rf_prune/query44.out | 69 - .../tpcds_sf100/rf_prune/query45.out | 35 - .../tpcds_sf100/rf_prune/query46.out | 38 - .../tpcds_sf100/rf_prune/query47.out | 45 - 
.../tpcds_sf100/rf_prune/query48.out | 29 - .../tpcds_sf100/rf_prune/query49.out | 107 - .../tpcds_sf100/rf_prune/query5.out | 77 - .../tpcds_sf100/rf_prune/query50.out | 29 - .../tpcds_sf100/rf_prune/query51.out | 43 - .../tpcds_sf100/rf_prune/query52.out | 23 - .../tpcds_sf100/rf_prune/query53.out | 32 - .../tpcds_sf100/rf_prune/query54.out | 76 - .../tpcds_sf100/rf_prune/query55.out | 23 - .../tpcds_sf100/rf_prune/query56.out | 83 - .../tpcds_sf100/rf_prune/query57.out | 45 - .../tpcds_sf100/rf_prune/query58.out | 86 - .../tpcds_sf100/rf_prune/query59.out | 42 - .../tpcds_sf100/rf_prune/query6.out | 47 - .../tpcds_sf100/rf_prune/query60.out | 83 - .../tpcds_sf100/rf_prune/query61.out | 70 - .../tpcds_sf100/rf_prune/query62.out | 29 - .../tpcds_sf100/rf_prune/query63.out | 32 - .../tpcds_sf100/rf_prune/query64.out | 101 - .../tpcds_sf100/rf_prune/query65.out | 41 - .../tpcds_sf100/rf_prune/query66.out | 62 - .../tpcds_sf100/rf_prune/query67.out | 32 - .../tpcds_sf100/rf_prune/query68.out | 38 - .../tpcds_sf100/rf_prune/query69.out | 47 - .../tpcds_sf100/rf_prune/query7.out | 31 - .../tpcds_sf100/rf_prune/query70.out | 44 - .../tpcds_sf100/rf_prune/query71.out | 37 - .../tpcds_sf100/rf_prune/query72.out | 54 - .../tpcds_sf100/rf_prune/query73.out | 32 - .../tpcds_sf100/rf_prune/query74.out | 54 - .../tpcds_sf100/rf_prune/query75.out | 73 - .../tpcds_sf100/rf_prune/query76.out | 40 - .../tpcds_sf100/rf_prune/query77.out | 101 - .../tpcds_sf100/rf_prune/query78.out | 57 - .../tpcds_sf100/rf_prune/query79.out | 32 - .../tpcds_sf100/rf_prune/query8.out | 43 - .../tpcds_sf100/rf_prune/query80.out | 100 - .../tpcds_sf100/rf_prune/query81.out | 41 - .../tpcds_sf100/rf_prune/query82.out | 27 - .../tpcds_sf100/rf_prune/query83.out | 80 - .../tpcds_sf100/rf_prune/query84.out | 31 - .../tpcds_sf100/rf_prune/query85.out | 46 - .../tpcds_sf100/rf_prune/query86.out | 28 - .../tpcds_sf100/rf_prune/query87.out | 48 - .../tpcds_sf100/rf_prune/query88.out | 171 - 
.../tpcds_sf100/rf_prune/query89.out | 33 - .../tpcds_sf100/rf_prune/query9.out | 115 - .../tpcds_sf100/rf_prune/query90.out | 47 - .../tpcds_sf100/rf_prune/query91.out | 41 - .../tpcds_sf100/rf_prune/query92.out | 25 - .../tpcds_sf100/rf_prune/query93.out | 21 - .../tpcds_sf100/rf_prune/query94.out | 34 - .../tpcds_sf100/rf_prune/query95.out | 43 - .../tpcds_sf100/rf_prune/query96.out | 25 - .../tpcds_sf100/rf_prune/query97.out | 33 - .../tpcds_sf100/rf_prune/query98.out | 26 - .../tpcds_sf100/rf_prune/query99.out | 29 - .../tpcds_sf100/shape/query1.out | 37 - .../tpcds_sf100/shape/query10.out | 47 - .../tpcds_sf100/shape/query11.out | 54 - .../tpcds_sf100/shape/query12.out | 26 - .../tpcds_sf100/shape/query13.out | 34 - .../tpcds_sf100/shape/query14.out | 154 - .../tpcds_sf100/shape/query15.out | 25 - .../tpcds_sf100/shape/query16.out | 34 - .../tpcds_sf100/shape/query17.out | 44 - .../tpcds_sf100/shape/query18.out | 42 - .../tpcds_sf100/shape/query19.out | 35 - .../tpcds_sf100/shape/query2.out | 39 - .../tpcds_sf100/shape/query20.out | 26 - .../tpcds_sf100/shape/query21.out | 26 - .../tpcds_sf100/shape/query22.out | 23 - .../tpcds_sf100/shape/query23.out | 81 - .../tpcds_sf100/shape/query24.out | 52 - .../tpcds_sf100/shape/query25.out | 43 - .../tpcds_sf100/shape/query26.out | 31 - .../tpcds_sf100/shape/query27.out | 33 - .../tpcds_sf100/shape/query28.out | 57 - .../tpcds_sf100/shape/query29.out | 43 - .../tpcds_sf100/shape/query3.out | 23 - .../tpcds_sf100/shape/query30.out | 41 - .../tpcds_sf100/shape/query31.out | 65 - .../tpcds_sf100/shape/query32.out | 26 - .../tpcds_sf100/shape/query33.out | 83 - .../tpcds_sf100/shape/query34.out | 32 - .../tpcds_sf100/shape/query35.out | 47 - .../tpcds_sf100/shape/query36.out | 33 - .../tpcds_sf100/shape/query37.out | 27 - .../tpcds_sf100/shape/query38.out | 50 - .../tpcds_sf100/shape/query39.out | 33 - .../tpcds_sf100/shape/query4.out | 75 - .../tpcds_sf100/shape/query40.out | 30 - .../tpcds_sf100/shape/query41.out | 23 
- .../tpcds_sf100/shape/query42.out | 22 - .../tpcds_sf100/shape/query43.out | 22 - .../tpcds_sf100/shape/query44.out | 69 - .../tpcds_sf100/shape/query45.out | 35 - .../tpcds_sf100/shape/query46.out | 38 - .../tpcds_sf100/shape/query47.out | 45 - .../tpcds_sf100/shape/query48.out | 29 - .../tpcds_sf100/shape/query49.out | 107 - .../tpcds_sf100/shape/query5.out | 77 - .../tpcds_sf100/shape/query50.out | 29 - .../tpcds_sf100/shape/query51.out | 43 - .../tpcds_sf100/shape/query52.out | 23 - .../tpcds_sf100/shape/query53.out | 32 - .../tpcds_sf100/shape/query54.out | 76 - .../tpcds_sf100/shape/query55.out | 23 - .../tpcds_sf100/shape/query56.out | 83 - .../tpcds_sf100/shape/query57.out | 45 - .../tpcds_sf100/shape/query58.out | 86 - .../tpcds_sf100/shape/query59.out | 42 - .../tpcds_sf100/shape/query6.out | 47 - .../tpcds_sf100/shape/query60.out | 83 - .../tpcds_sf100/shape/query61.out | 70 - .../tpcds_sf100/shape/query62.out | 29 - .../tpcds_sf100/shape/query63.out | 32 - .../tpcds_sf100/shape/query64.out | 101 - .../tpcds_sf100/shape/query65.out | 41 - .../tpcds_sf100/shape/query66.out | 62 - .../tpcds_sf100/shape/query67.out | 32 - .../tpcds_sf100/shape/query68.out | 38 - .../tpcds_sf100/shape/query69.out | 47 - .../tpcds_sf100/shape/query7.out | 31 - .../tpcds_sf100/shape/query70.out | 44 - .../tpcds_sf100/shape/query71.out | 37 - .../tpcds_sf100/shape/query72.out | 54 - .../tpcds_sf100/shape/query73.out | 32 - .../tpcds_sf100/shape/query74.out | 54 - .../tpcds_sf100/shape/query75.out | 73 - .../tpcds_sf100/shape/query76.out | 40 - .../tpcds_sf100/shape/query77.out | 101 - .../tpcds_sf100/shape/query78.out | 57 - .../tpcds_sf100/shape/query79.out | 32 - .../tpcds_sf100/shape/query8.out | 43 - .../tpcds_sf100/shape/query80.out | 100 - .../tpcds_sf100/shape/query81.out | 41 - .../tpcds_sf100/shape/query82.out | 27 - .../tpcds_sf100/shape/query83.out | 80 - .../tpcds_sf100/shape/query84.out | 31 - .../tpcds_sf100/shape/query85.out | 46 - 
.../tpcds_sf100/shape/query86.out | 28 - .../tpcds_sf100/shape/query87.out | 48 - .../tpcds_sf100/shape/query88.out | 171 - .../tpcds_sf100/shape/query89.out | 33 - .../tpcds_sf100/shape/query9.out | 115 - .../tpcds_sf100/shape/query90.out | 47 - .../tpcds_sf100/shape/query91.out | 41 - .../tpcds_sf100/shape/query92.out | 25 - .../tpcds_sf100/shape/query93.out | 21 - .../tpcds_sf100/shape/query94.out | 34 - .../tpcds_sf100/shape/query95.out | 43 - .../tpcds_sf100/shape/query96.out | 25 - .../tpcds_sf100/shape/query97.out | 33 - .../tpcds_sf100/shape/query98.out | 26 - .../tpcds_sf100/shape/query99.out | 29 - .../bs_downgrade_shape/query13.out | 34 - .../bs_downgrade_shape/query19.out | 35 - .../bs_downgrade_shape/query44.out | 69 - .../bs_downgrade_shape/query45.out | 35 - .../bs_downgrade_shape/query54.out | 76 - .../bs_downgrade_shape/query56.out | 83 - .../bs_downgrade_shape/query6.out | 47 - .../bs_downgrade_shape/query61.out | 70 - .../bs_downgrade_shape/query68.out | 38 - .../bs_downgrade_shape/query8.out | 43 - .../bs_downgrade_shape/query91.out | 41 - .../bs_downgrade_shape/query95.out | 43 - .../eliminate_empty/query10_empty.out | 47 - .../tpcds_sf1000/shape/query1.out | 37 - .../tpcds_sf1000/shape/query10.out | 47 - .../tpcds_sf1000/shape/query11.out | 54 - .../tpcds_sf1000/shape/query12.out | 26 - .../tpcds_sf1000/shape/query13.out | 34 - .../tpcds_sf1000/shape/query14.out | 152 - .../tpcds_sf1000/shape/query15.out | 25 - .../tpcds_sf1000/shape/query16.out | 34 - .../tpcds_sf1000/shape/query17.out | 44 - .../tpcds_sf1000/shape/query18.out | 42 - .../tpcds_sf1000/shape/query19.out | 35 - .../tpcds_sf1000/shape/query2.out | 39 - .../tpcds_sf1000/shape/query20.out | 26 - .../tpcds_sf1000/shape/query21.out | 26 - .../tpcds_sf1000/shape/query22.out | 23 - .../tpcds_sf1000/shape/query23.out | 81 - .../tpcds_sf1000/shape/query24.out | 52 - .../tpcds_sf1000/shape/query25.out | 43 - .../tpcds_sf1000/shape/query26.out | 31 - .../tpcds_sf1000/shape/query27.out | 33 
- .../tpcds_sf1000/shape/query28.out | 57 - .../tpcds_sf1000/shape/query29.out | 43 - .../tpcds_sf1000/shape/query3.out | 23 - .../tpcds_sf1000/shape/query30.out | 41 - .../tpcds_sf1000/shape/query31.out | 65 - .../tpcds_sf1000/shape/query32.out | 26 - .../tpcds_sf1000/shape/query33.out | 83 - .../tpcds_sf1000/shape/query34.out | 32 - .../tpcds_sf1000/shape/query35.out | 47 - .../tpcds_sf1000/shape/query36.out | 33 - .../tpcds_sf1000/shape/query37.out | 27 - .../tpcds_sf1000/shape/query38.out | 50 - .../tpcds_sf1000/shape/query39.out | 33 - .../tpcds_sf1000/shape/query4.out | 75 - .../tpcds_sf1000/shape/query40.out | 30 - .../tpcds_sf1000/shape/query41.out | 23 - .../tpcds_sf1000/shape/query42.out | 22 - .../tpcds_sf1000/shape/query43.out | 22 - .../tpcds_sf1000/shape/query44.out | 69 - .../tpcds_sf1000/shape/query45.out | 35 - .../tpcds_sf1000/shape/query46.out | 38 - .../tpcds_sf1000/shape/query47.out | 45 - .../tpcds_sf1000/shape/query48.out | 29 - .../tpcds_sf1000/shape/query49.out | 107 - .../tpcds_sf1000/shape/query5.out | 77 - .../tpcds_sf1000/shape/query50.out | 29 - .../tpcds_sf1000/shape/query51.out | 43 - .../tpcds_sf1000/shape/query52.out | 23 - .../tpcds_sf1000/shape/query53.out | 32 - .../tpcds_sf1000/shape/query54.out | 76 - .../tpcds_sf1000/shape/query55.out | 23 - .../tpcds_sf1000/shape/query56.out | 83 - .../tpcds_sf1000/shape/query57.out | 45 - .../tpcds_sf1000/shape/query58.out | 86 - .../tpcds_sf1000/shape/query59.out | 42 - .../tpcds_sf1000/shape/query6.out | 47 - .../tpcds_sf1000/shape/query60.out | 83 - .../tpcds_sf1000/shape/query61.out | 70 - .../tpcds_sf1000/shape/query62.out | 29 - .../tpcds_sf1000/shape/query63.out | 32 - .../tpcds_sf1000/shape/query64.out | 101 - .../tpcds_sf1000/shape/query65.out | 41 - .../tpcds_sf1000/shape/query66.out | 62 - .../tpcds_sf1000/shape/query67.out | 32 - .../tpcds_sf1000/shape/query68.out | 38 - .../tpcds_sf1000/shape/query69.out | 47 - .../tpcds_sf1000/shape/query7.out | 31 - 
.../tpcds_sf1000/shape/query70.out | 44 - .../tpcds_sf1000/shape/query71.out | 37 - .../tpcds_sf1000/shape/query72.out | 54 - .../tpcds_sf1000/shape/query73.out | 32 - .../tpcds_sf1000/shape/query74.out | 54 - .../tpcds_sf1000/shape/query75.out | 73 - .../tpcds_sf1000/shape/query76.out | 40 - .../tpcds_sf1000/shape/query77.out | 101 - .../tpcds_sf1000/shape/query78.out | 57 - .../tpcds_sf1000/shape/query79.out | 32 - .../tpcds_sf1000/shape/query8.out | 43 - .../tpcds_sf1000/shape/query80.out | 100 - .../tpcds_sf1000/shape/query81.out | 41 - .../tpcds_sf1000/shape/query82.out | 27 - .../tpcds_sf1000/shape/query83.out | 80 - .../tpcds_sf1000/shape/query84.out | 31 - .../tpcds_sf1000/shape/query85.out | 46 - .../tpcds_sf1000/shape/query86.out | 28 - .../tpcds_sf1000/shape/query87.out | 48 - .../tpcds_sf1000/shape/query88.out | 171 - .../tpcds_sf1000/shape/query89.out | 33 - .../tpcds_sf1000/shape/query9.out | 115 - .../tpcds_sf1000/shape/query90.out | 47 - .../tpcds_sf1000/shape/query91.out | 41 - .../tpcds_sf1000/shape/query92.out | 25 - .../tpcds_sf1000/shape/query93.out | 21 - .../tpcds_sf1000/shape/query94.out | 34 - .../tpcds_sf1000/shape/query95.out | 43 - .../tpcds_sf1000/shape/query96.out | 25 - .../tpcds_sf1000/shape/query97.out | 35 - .../tpcds_sf1000/shape/query98.out | 26 - .../tpcds_sf1000/shape/query99.out | 29 - .../tpch_sf1000/nostats_rf_prune/q1.out | 13 - .../tpch_sf1000/nostats_rf_prune/q10.out | 26 - .../tpch_sf1000/nostats_rf_prune/q11.out | 37 - .../tpch_sf1000/nostats_rf_prune/q12.out | 17 - .../tpch_sf1000/nostats_rf_prune/q13.out | 19 - .../tpch_sf1000/nostats_rf_prune/q14.out | 15 - .../tpch_sf1000/nostats_rf_prune/q16.out | 21 - .../tpch_sf1000/nostats_rf_prune/q17.out | 19 - .../tpch_sf1000/nostats_rf_prune/q18.out | 24 - .../tpch_sf1000/nostats_rf_prune/q19.out | 15 - .../tpch_sf1000/nostats_rf_prune/q2.out | 30 - .../nostats_rf_prune/q20-rewrite.out | 31 - .../tpch_sf1000/nostats_rf_prune/q20.out | 30 - 
.../tpch_sf1000/nostats_rf_prune/q21.out | 34 - .../tpch_sf1000/nostats_rf_prune/q22.out | 25 - .../tpch_sf1000/nostats_rf_prune/q3.out | 21 - .../tpch_sf1000/nostats_rf_prune/q4.out | 18 - .../tpch_sf1000/nostats_rf_prune/q5.out | 34 - .../tpch_sf1000/nostats_rf_prune/q6.out | 10 - .../tpch_sf1000/nostats_rf_prune/q7.out | 34 - .../tpch_sf1000/nostats_rf_prune/q8.out | 44 - .../tpch_sf1000/nostats_rf_prune/q9.out | 33 - .../new_shapes_p0/tpch_sf1000/rf_prune/q1.out | 13 - .../tpch_sf1000/rf_prune/q10.out | 24 - .../tpch_sf1000/rf_prune/q11.out | 37 - .../tpch_sf1000/rf_prune/q12.out | 17 - .../tpch_sf1000/rf_prune/q13.out | 19 - .../tpch_sf1000/rf_prune/q14.out | 15 - .../tpch_sf1000/rf_prune/q15.out | 30 - .../tpch_sf1000/rf_prune/q16.out | 21 - .../tpch_sf1000/rf_prune/q17.out | 20 - .../tpch_sf1000/rf_prune/q18.out | 24 - .../tpch_sf1000/rf_prune/q19.out | 15 - .../new_shapes_p0/tpch_sf1000/rf_prune/q2.out | 31 - .../tpch_sf1000/rf_prune/q20-rewrite.out | 31 - .../tpch_sf1000/rf_prune/q20.out | 30 - .../tpch_sf1000/rf_prune/q21.out | 35 - .../tpch_sf1000/rf_prune/q22.out | 25 - .../new_shapes_p0/tpch_sf1000/rf_prune/q3.out | 21 - .../new_shapes_p0/tpch_sf1000/rf_prune/q4.out | 18 - .../new_shapes_p0/tpch_sf1000/rf_prune/q5.out | 34 - .../new_shapes_p0/tpch_sf1000/rf_prune/q6.out | 10 - .../new_shapes_p0/tpch_sf1000/rf_prune/q7.out | 35 - .../new_shapes_p0/tpch_sf1000/rf_prune/q8.out | 44 - .../new_shapes_p0/tpch_sf1000/rf_prune/q9.out | 33 - .../runtime_filter/test_pushdown_setop.out | 36 - .../new_shapes_p0/tpch_sf1000/shape/q1.out | 13 - .../new_shapes_p0/tpch_sf1000/shape/q10.out | 24 - .../new_shapes_p0/tpch_sf1000/shape/q11.out | 37 - .../new_shapes_p0/tpch_sf1000/shape/q12.out | 17 - .../new_shapes_p0/tpch_sf1000/shape/q13.out | 19 - .../new_shapes_p0/tpch_sf1000/shape/q14.out | 15 - .../new_shapes_p0/tpch_sf1000/shape/q15.out | 30 - .../new_shapes_p0/tpch_sf1000/shape/q16.out | 21 - .../new_shapes_p0/tpch_sf1000/shape/q17.out | 20 - 
.../new_shapes_p0/tpch_sf1000/shape/q18.out | 24 - .../new_shapes_p0/tpch_sf1000/shape/q19.out | 15 - .../new_shapes_p0/tpch_sf1000/shape/q2.out | 31 - .../tpch_sf1000/shape/q20-rewrite.out | 31 - .../new_shapes_p0/tpch_sf1000/shape/q20.out | 30 - .../new_shapes_p0/tpch_sf1000/shape/q21.out | 35 - .../new_shapes_p0/tpch_sf1000/shape/q22.out | 25 - .../new_shapes_p0/tpch_sf1000/shape/q3.out | 21 - .../new_shapes_p0/tpch_sf1000/shape/q4.out | 18 - .../new_shapes_p0/tpch_sf1000/shape/q5.out | 34 - .../new_shapes_p0/tpch_sf1000/shape/q6.out | 10 - .../new_shapes_p0/tpch_sf1000/shape/q7.out | 35 - .../new_shapes_p0/tpch_sf1000/shape/q8.out | 44 - .../new_shapes_p0/tpch_sf1000/shape/q9.out | 33 - .../tpch_sf1000/shape_no_stats/q1.out | 13 - .../tpch_sf1000/shape_no_stats/q10.out | 26 - .../tpch_sf1000/shape_no_stats/q11.out | 37 - .../tpch_sf1000/shape_no_stats/q12.out | 17 - .../tpch_sf1000/shape_no_stats/q13.out | 19 - .../tpch_sf1000/shape_no_stats/q14.out | 15 - .../tpch_sf1000/shape_no_stats/q15.out | 30 - .../tpch_sf1000/shape_no_stats/q16.out | 21 - .../tpch_sf1000/shape_no_stats/q17.out | 19 - .../tpch_sf1000/shape_no_stats/q18.out | 24 - .../tpch_sf1000/shape_no_stats/q19.out | 15 - .../tpch_sf1000/shape_no_stats/q2.out | 30 - .../shape_no_stats/q20-rewrite.out | 31 - .../tpch_sf1000/shape_no_stats/q20.out | 30 - .../tpch_sf1000/shape_no_stats/q21.out | 34 - .../tpch_sf1000/shape_no_stats/q22.out | 25 - .../tpch_sf1000/shape_no_stats/q3.out | 21 - .../tpch_sf1000/shape_no_stats/q4.out | 18 - .../tpch_sf1000/shape_no_stats/q5.out | 34 - .../tpch_sf1000/shape_no_stats/q6.out | 10 - .../tpch_sf1000/shape_no_stats/q7.out | 34 - .../tpch_sf1000/shape_no_stats/q8.out | 44 - .../tpch_sf1000/shape_no_stats/q9.out | 33 - .../clickbench}/query1.out | 0 .../clickbench}/query10.out | 0 .../clickbench}/query11.out | 0 .../clickbench}/query12.out | 0 .../clickbench}/query13.out | 0 .../clickbench}/query14.out | 0 .../clickbench}/query15.out | 0 .../clickbench}/query16.out | 0 
.../clickbench}/query17.out | 0 .../clickbench}/query18.out | 0 .../clickbench}/query19.out | 0 .../clickbench}/query2.out | 0 .../clickbench}/query20.out | 0 .../clickbench}/query21.out | 0 .../clickbench}/query22.out | 0 .../clickbench}/query23.out | 0 .../clickbench}/query24.out | 0 .../clickbench}/query25.out | 0 .../clickbench}/query26.out | 0 .../clickbench}/query27.out | 0 .../clickbench}/query28.out | 0 .../clickbench}/query29.out | 0 .../clickbench}/query3.out | 0 .../clickbench}/query30.out | 0 .../clickbench}/query31.out | 0 .../clickbench}/query32.out | 0 .../clickbench}/query33.out | 0 .../clickbench}/query34.out | 0 .../clickbench}/query35.out | 0 .../clickbench}/query36.out | 0 .../clickbench}/query37.out | 0 .../clickbench}/query38.out | 0 .../clickbench}/query39.out | 0 .../clickbench}/query4.out | 0 .../clickbench}/query40.out | 0 .../clickbench}/query41.out | 0 .../clickbench}/query42.out | 0 .../clickbench}/query43.out | 0 .../clickbench}/query5.out | 0 .../clickbench}/query6.out | 0 .../clickbench}/query7.out | 0 .../clickbench}/query8.out | 0 .../clickbench}/query9.out | 0 .../ssb_sf100}/shape/flat.out | 0 .../ssb_sf100}/shape/q1.1.out | 0 .../ssb_sf100}/shape/q1.2.out | 0 .../ssb_sf100}/shape/q1.3.out | 0 .../ssb_sf100}/shape/q2.1.out | 0 .../ssb_sf100}/shape/q2.2.out | 0 .../ssb_sf100}/shape/q2.3.out | 0 .../ssb_sf100}/shape/q3.1.out | 0 .../ssb_sf100}/shape/q3.2.out | 0 .../ssb_sf100}/shape/q3.3.out | 0 .../ssb_sf100}/shape/q3.4.out | 0 .../ssb_sf100}/shape/q4.1.out | 0 .../ssb_sf100}/shape/q4.2.out | 0 .../ssb_sf100}/shape/q4.3.out | 0 .../tpcds_sf100}/constraints/query23.out | 0 .../tpcds_sf100}/noStatsRfPrune/query1.out | 0 .../tpcds_sf100}/noStatsRfPrune/query10.out | 0 .../tpcds_sf100}/noStatsRfPrune/query11.out | 0 .../tpcds_sf100}/noStatsRfPrune/query12.out | 0 .../tpcds_sf100}/noStatsRfPrune/query13.out | 0 .../tpcds_sf100}/noStatsRfPrune/query14.out | 0 .../tpcds_sf100}/noStatsRfPrune/query15.out | 0 
.../tpcds_sf100}/noStatsRfPrune/query16.out | 0 .../tpcds_sf100}/noStatsRfPrune/query17.out | 0 .../tpcds_sf100}/noStatsRfPrune/query18.out | 0 .../tpcds_sf100}/noStatsRfPrune/query19.out | 0 .../tpcds_sf100}/noStatsRfPrune/query2.out | 0 .../tpcds_sf100}/noStatsRfPrune/query20.out | 0 .../tpcds_sf100}/noStatsRfPrune/query21.out | 0 .../tpcds_sf100}/noStatsRfPrune/query22.out | 0 .../tpcds_sf100}/noStatsRfPrune/query23.out | 0 .../tpcds_sf100}/noStatsRfPrune/query24.out | 0 .../tpcds_sf100}/noStatsRfPrune/query25.out | 0 .../tpcds_sf100}/noStatsRfPrune/query26.out | 0 .../tpcds_sf100}/noStatsRfPrune/query27.out | 0 .../tpcds_sf100}/noStatsRfPrune/query28.out | 0 .../tpcds_sf100}/noStatsRfPrune/query29.out | 0 .../tpcds_sf100}/noStatsRfPrune/query3.out | 0 .../tpcds_sf100}/noStatsRfPrune/query30.out | 0 .../tpcds_sf100}/noStatsRfPrune/query31.out | 0 .../tpcds_sf100}/noStatsRfPrune/query32.out | 0 .../tpcds_sf100}/noStatsRfPrune/query33.out | 0 .../tpcds_sf100}/noStatsRfPrune/query34.out | 0 .../tpcds_sf100}/noStatsRfPrune/query35.out | 0 .../tpcds_sf100}/noStatsRfPrune/query36.out | 0 .../tpcds_sf100}/noStatsRfPrune/query37.out | 0 .../tpcds_sf100}/noStatsRfPrune/query38.out | 0 .../tpcds_sf100}/noStatsRfPrune/query39.out | 0 .../tpcds_sf100}/noStatsRfPrune/query4.out | 0 .../tpcds_sf100}/noStatsRfPrune/query40.out | 0 .../tpcds_sf100}/noStatsRfPrune/query41.out | 0 .../tpcds_sf100}/noStatsRfPrune/query42.out | 0 .../tpcds_sf100/noStatsRfPrune}/query43.out | 0 .../tpcds_sf100}/noStatsRfPrune/query44.out | 0 .../tpcds_sf100}/noStatsRfPrune/query45.out | 0 .../tpcds_sf100}/noStatsRfPrune/query46.out | 0 .../tpcds_sf100}/noStatsRfPrune/query47.out | 0 .../tpcds_sf100}/noStatsRfPrune/query48.out | 0 .../tpcds_sf100}/noStatsRfPrune/query49.out | 0 .../tpcds_sf100}/noStatsRfPrune/query5.out | 0 .../tpcds_sf100}/noStatsRfPrune/query50.out | 0 .../tpcds_sf100}/noStatsRfPrune/query51.out | 0 .../tpcds_sf100}/noStatsRfPrune/query52.out | 0 
.../tpcds_sf100}/noStatsRfPrune/query53.out | 0 .../tpcds_sf100}/noStatsRfPrune/query54.out | 0 .../tpcds_sf100}/noStatsRfPrune/query55.out | 0 .../tpcds_sf100}/noStatsRfPrune/query56.out | 0 .../tpcds_sf100}/noStatsRfPrune/query57.out | 0 .../tpcds_sf100}/noStatsRfPrune/query58.out | 0 .../tpcds_sf100}/noStatsRfPrune/query59.out | 0 .../tpcds_sf100}/noStatsRfPrune/query6.out | 0 .../tpcds_sf100}/noStatsRfPrune/query60.out | 0 .../tpcds_sf100}/noStatsRfPrune/query61.out | 0 .../tpcds_sf100}/noStatsRfPrune/query62.out | 0 .../tpcds_sf100}/noStatsRfPrune/query63.out | 0 .../tpcds_sf100}/noStatsRfPrune/query64.out | 0 .../tpcds_sf100}/noStatsRfPrune/query65.out | 0 .../tpcds_sf100}/noStatsRfPrune/query66.out | 0 .../tpcds_sf100}/noStatsRfPrune/query67.out | 0 .../tpcds_sf100}/noStatsRfPrune/query68.out | 0 .../tpcds_sf100}/noStatsRfPrune/query69.out | 0 .../tpcds_sf100}/noStatsRfPrune/query7.out | 0 .../tpcds_sf100}/noStatsRfPrune/query70.out | 0 .../tpcds_sf100}/noStatsRfPrune/query71.out | 0 .../tpcds_sf100}/noStatsRfPrune/query72.out | 0 .../tpcds_sf100}/noStatsRfPrune/query73.out | 0 .../tpcds_sf100}/noStatsRfPrune/query74.out | 0 .../tpcds_sf100}/noStatsRfPrune/query75.out | 0 .../tpcds_sf100}/noStatsRfPrune/query76.out | 0 .../tpcds_sf100}/noStatsRfPrune/query77.out | 0 .../tpcds_sf100}/noStatsRfPrune/query78.out | 0 .../tpcds_sf100}/noStatsRfPrune/query79.out | 0 .../tpcds_sf100}/noStatsRfPrune/query8.out | 0 .../tpcds_sf100}/noStatsRfPrune/query80.out | 0 .../tpcds_sf100}/noStatsRfPrune/query81.out | 0 .../tpcds_sf100}/noStatsRfPrune/query82.out | 0 .../tpcds_sf100}/noStatsRfPrune/query83.out | 0 .../tpcds_sf100}/noStatsRfPrune/query84.out | 0 .../tpcds_sf100}/noStatsRfPrune/query85.out | 0 .../tpcds_sf100}/noStatsRfPrune/query86.out | 0 .../tpcds_sf100}/noStatsRfPrune/query87.out | 0 .../tpcds_sf100}/noStatsRfPrune/query88.out | 0 .../tpcds_sf100}/noStatsRfPrune/query89.out | 0 .../tpcds_sf100/noStatsRfPrune}/query9.out | 0 
.../tpcds_sf100}/noStatsRfPrune/query90.out | 0 .../tpcds_sf100}/noStatsRfPrune/query91.out | 0 .../tpcds_sf100}/noStatsRfPrune/query92.out | 0 .../tpcds_sf100}/noStatsRfPrune/query93.out | 0 .../tpcds_sf100}/noStatsRfPrune/query94.out | 0 .../tpcds_sf100}/noStatsRfPrune/query95.out | 0 .../tpcds_sf100}/noStatsRfPrune/query96.out | 0 .../tpcds_sf100}/noStatsRfPrune/query97.out | 0 .../tpcds_sf100}/noStatsRfPrune/query98.out | 0 .../tpcds_sf100}/noStatsRfPrune/query99.out | 0 .../tpcds_sf100}/no_stats_shape/query1.out | 0 .../tpcds_sf100}/no_stats_shape/query10.out | 0 .../tpcds_sf100}/no_stats_shape/query11.out | 0 .../tpcds_sf100}/no_stats_shape/query12.out | 0 .../tpcds_sf100}/no_stats_shape/query13.out | 0 .../tpcds_sf100}/no_stats_shape/query14.out | 0 .../tpcds_sf100}/no_stats_shape/query15.out | 0 .../tpcds_sf100}/no_stats_shape/query16.out | 0 .../tpcds_sf100}/no_stats_shape/query17.out | 0 .../tpcds_sf100}/no_stats_shape/query18.out | 0 .../tpcds_sf100}/no_stats_shape/query19.out | 0 .../tpcds_sf100}/no_stats_shape/query2.out | 0 .../tpcds_sf100}/no_stats_shape/query20.out | 0 .../tpcds_sf100}/no_stats_shape/query21.out | 0 .../tpcds_sf100}/no_stats_shape/query22.out | 0 .../tpcds_sf100}/no_stats_shape/query23.out | 0 .../tpcds_sf100}/no_stats_shape/query24.out | 0 .../tpcds_sf100}/no_stats_shape/query25.out | 0 .../tpcds_sf100}/no_stats_shape/query26.out | 0 .../tpcds_sf100}/no_stats_shape/query27.out | 0 .../tpcds_sf100}/no_stats_shape/query28.out | 0 .../tpcds_sf100}/no_stats_shape/query29.out | 0 .../tpcds_sf100}/no_stats_shape/query3.out | 0 .../tpcds_sf100}/no_stats_shape/query30.out | 0 .../tpcds_sf100}/no_stats_shape/query31.out | 0 .../tpcds_sf100}/no_stats_shape/query32.out | 0 .../tpcds_sf100}/no_stats_shape/query33.out | 0 .../tpcds_sf100}/no_stats_shape/query34.out | 0 .../tpcds_sf100}/no_stats_shape/query35.out | 0 .../tpcds_sf100}/no_stats_shape/query36.out | 0 .../tpcds_sf100}/no_stats_shape/query37.out | 0 
.../tpcds_sf100}/no_stats_shape/query38.out | 0 .../tpcds_sf100}/no_stats_shape/query39.out | 0 .../tpcds_sf100}/no_stats_shape/query4.out | 0 .../tpcds_sf100}/no_stats_shape/query40.out | 0 .../tpcds_sf100}/no_stats_shape/query41.out | 0 .../tpcds_sf100}/no_stats_shape/query42.out | 0 .../tpcds_sf100/no_stats_shape}/query43.out | 0 .../tpcds_sf100}/no_stats_shape/query44.out | 0 .../tpcds_sf100}/no_stats_shape/query45.out | 0 .../tpcds_sf100}/no_stats_shape/query46.out | 0 .../tpcds_sf100}/no_stats_shape/query47.out | 0 .../tpcds_sf100}/no_stats_shape/query48.out | 0 .../tpcds_sf100}/no_stats_shape/query49.out | 0 .../tpcds_sf100}/no_stats_shape/query5.out | 0 .../tpcds_sf100}/no_stats_shape/query50.out | 0 .../tpcds_sf100}/no_stats_shape/query51.out | 0 .../tpcds_sf100}/no_stats_shape/query52.out | 0 .../tpcds_sf100}/no_stats_shape/query53.out | 0 .../tpcds_sf100}/no_stats_shape/query54.out | 0 .../tpcds_sf100}/no_stats_shape/query55.out | 0 .../tpcds_sf100}/no_stats_shape/query56.out | 0 .../tpcds_sf100}/no_stats_shape/query57.out | 0 .../tpcds_sf100}/no_stats_shape/query58.out | 0 .../tpcds_sf100}/no_stats_shape/query59.out | 0 .../tpcds_sf100}/no_stats_shape/query6.out | 0 .../tpcds_sf100}/no_stats_shape/query60.out | 0 .../tpcds_sf100}/no_stats_shape/query61.out | 0 .../tpcds_sf100}/no_stats_shape/query62.out | 0 .../tpcds_sf100}/no_stats_shape/query63.out | 0 .../tpcds_sf100}/no_stats_shape/query64.out | 0 .../tpcds_sf100}/no_stats_shape/query65.out | 0 .../tpcds_sf100}/no_stats_shape/query66.out | 0 .../tpcds_sf100}/no_stats_shape/query67.out | 0 .../tpcds_sf100}/no_stats_shape/query68.out | 0 .../tpcds_sf100}/no_stats_shape/query69.out | 0 .../tpcds_sf100}/no_stats_shape/query7.out | 0 .../tpcds_sf100}/no_stats_shape/query70.out | 0 .../tpcds_sf100}/no_stats_shape/query71.out | 0 .../tpcds_sf100}/no_stats_shape/query72.out | 0 .../tpcds_sf100}/no_stats_shape/query73.out | 0 .../tpcds_sf100}/no_stats_shape/query74.out | 0 
.../tpcds_sf100}/no_stats_shape/query75.out | 0 .../tpcds_sf100}/no_stats_shape/query76.out | 0 .../tpcds_sf100}/no_stats_shape/query77.out | 0 .../tpcds_sf100}/no_stats_shape/query78.out | 0 .../tpcds_sf100}/no_stats_shape/query79.out | 0 .../tpcds_sf100}/no_stats_shape/query8.out | 0 .../tpcds_sf100}/no_stats_shape/query80.out | 0 .../tpcds_sf100}/no_stats_shape/query81.out | 0 .../tpcds_sf100}/no_stats_shape/query82.out | 0 .../tpcds_sf100}/no_stats_shape/query83.out | 0 .../tpcds_sf100}/no_stats_shape/query84.out | 0 .../tpcds_sf100}/no_stats_shape/query85.out | 0 .../tpcds_sf100}/no_stats_shape/query86.out | 0 .../tpcds_sf100}/no_stats_shape/query87.out | 0 .../tpcds_sf100}/no_stats_shape/query88.out | 0 .../tpcds_sf100}/no_stats_shape/query89.out | 0 .../tpcds_sf100/no_stats_shape}/query9.out | 0 .../tpcds_sf100}/no_stats_shape/query90.out | 0 .../tpcds_sf100}/no_stats_shape/query91.out | 0 .../tpcds_sf100}/no_stats_shape/query92.out | 0 .../tpcds_sf100}/no_stats_shape/query93.out | 0 .../tpcds_sf100}/no_stats_shape/query94.out | 0 .../tpcds_sf100}/no_stats_shape/query95.out | 0 .../tpcds_sf100}/no_stats_shape/query96.out | 0 .../tpcds_sf100}/no_stats_shape/query97.out | 0 .../tpcds_sf100}/no_stats_shape/query98.out | 0 .../tpcds_sf100}/no_stats_shape/query99.out | 0 .../tpcds_sf100}/rf_prune/query1.out | 0 .../tpcds_sf100}/rf_prune/query10.out | 0 .../tpcds_sf100}/rf_prune/query11.out | 0 .../tpcds_sf100}/rf_prune/query12.out | 0 .../tpcds_sf100}/rf_prune/query13.out | 0 .../tpcds_sf100}/rf_prune/query14.out | 0 .../tpcds_sf100}/rf_prune/query15.out | 0 .../tpcds_sf100}/rf_prune/query16.out | 0 .../tpcds_sf100}/rf_prune/query17.out | 0 .../tpcds_sf100}/rf_prune/query18.out | 0 .../tpcds_sf100}/rf_prune/query19.out | 0 .../tpcds_sf100}/rf_prune/query2.out | 0 .../tpcds_sf100}/rf_prune/query20.out | 0 .../tpcds_sf100}/rf_prune/query21.out | 0 .../tpcds_sf100}/rf_prune/query22.out | 0 .../tpcds_sf100}/rf_prune/query23.out | 0 
.../tpcds_sf100}/rf_prune/query24.out | 0 .../tpcds_sf100}/rf_prune/query25.out | 0 .../tpcds_sf100}/rf_prune/query26.out | 0 .../tpcds_sf100}/rf_prune/query27.out | 0 .../tpcds_sf100}/rf_prune/query28.out | 0 .../tpcds_sf100}/rf_prune/query29.out | 0 .../tpcds_sf100/rf_prune}/query3.out | 0 .../tpcds_sf100}/rf_prune/query30.out | 0 .../tpcds_sf100}/rf_prune/query31.out | 0 .../tpcds_sf100}/rf_prune/query32.out | 0 .../tpcds_sf100}/rf_prune/query33.out | 0 .../tpcds_sf100}/rf_prune/query34.out | 0 .../tpcds_sf100}/rf_prune/query35.out | 0 .../tpcds_sf100}/rf_prune/query36.out | 0 .../tpcds_sf100}/rf_prune/query37.out | 0 .../tpcds_sf100}/rf_prune/query38.out | 0 .../tpcds_sf100}/rf_prune/query39.out | 0 .../tpcds_sf100}/rf_prune/query4.out | 0 .../tpcds_sf100}/rf_prune/query40.out | 0 .../tpcds_sf100}/rf_prune/query41.out | 0 .../tpcds_sf100}/rf_prune/query42.out | 0 .../tpcds_sf100/rf_prune}/query43.out | 0 .../tpcds_sf100}/rf_prune/query44.out | 0 .../tpcds_sf100}/rf_prune/query45.out | 0 .../tpcds_sf100}/rf_prune/query46.out | 0 .../tpcds_sf100}/rf_prune/query47.out | 0 .../tpcds_sf100}/rf_prune/query48.out | 0 .../tpcds_sf100}/rf_prune/query49.out | 0 .../tpcds_sf100}/rf_prune/query5.out | 0 .../tpcds_sf100}/rf_prune/query50.out | 0 .../tpcds_sf100}/rf_prune/query51.out | 0 .../tpcds_sf100}/rf_prune/query52.out | 0 .../tpcds_sf100}/rf_prune/query53.out | 0 .../tpcds_sf100}/rf_prune/query54.out | 0 .../tpcds_sf100}/rf_prune/query55.out | 0 .../tpcds_sf100}/rf_prune/query56.out | 0 .../tpcds_sf100}/rf_prune/query57.out | 0 .../tpcds_sf100}/rf_prune/query58.out | 0 .../tpcds_sf100}/rf_prune/query59.out | 0 .../tpcds_sf100}/rf_prune/query6.out | 0 .../tpcds_sf100}/rf_prune/query60.out | 0 .../tpcds_sf100}/rf_prune/query61.out | 0 .../tpcds_sf100}/rf_prune/query62.out | 0 .../tpcds_sf100}/rf_prune/query63.out | 0 .../tpcds_sf100}/rf_prune/query64.out | 0 .../tpcds_sf100}/rf_prune/query65.out | 0 .../tpcds_sf100}/rf_prune/query66.out | 0 
.../tpcds_sf100}/rf_prune/query67.out | 0 .../tpcds_sf100}/rf_prune/query68.out | 0 .../tpcds_sf100}/rf_prune/query69.out | 0 .../tpcds_sf100}/rf_prune/query7.out | 0 .../tpcds_sf100}/rf_prune/query70.out | 0 .../tpcds_sf100}/rf_prune/query71.out | 0 .../tpcds_sf100}/rf_prune/query72.out | 0 .../tpcds_sf100}/rf_prune/query73.out | 0 .../tpcds_sf100}/rf_prune/query74.out | 0 .../tpcds_sf100}/rf_prune/query75.out | 0 .../tpcds_sf100}/rf_prune/query76.out | 0 .../tpcds_sf100}/rf_prune/query77.out | 0 .../tpcds_sf100}/rf_prune/query78.out | 0 .../tpcds_sf100}/rf_prune/query79.out | 0 .../tpcds_sf100}/rf_prune/query8.out | 0 .../tpcds_sf100}/rf_prune/query80.out | 0 .../tpcds_sf100}/rf_prune/query81.out | 0 .../tpcds_sf100}/rf_prune/query82.out | 0 .../tpcds_sf100}/rf_prune/query83.out | 0 .../tpcds_sf100}/rf_prune/query84.out | 0 .../tpcds_sf100}/rf_prune/query85.out | 0 .../tpcds_sf100}/rf_prune/query86.out | 0 .../tpcds_sf100}/rf_prune/query87.out | 0 .../tpcds_sf100}/rf_prune/query88.out | 0 .../tpcds_sf100}/rf_prune/query89.out | 0 .../tpcds_sf100/rf_prune}/query9.out | 0 .../tpcds_sf100}/rf_prune/query90.out | 0 .../tpcds_sf100}/rf_prune/query91.out | 0 .../tpcds_sf100}/rf_prune/query92.out | 0 .../tpcds_sf100}/rf_prune/query93.out | 0 .../tpcds_sf100}/rf_prune/query94.out | 0 .../tpcds_sf100}/rf_prune/query95.out | 0 .../tpcds_sf100}/rf_prune/query96.out | 0 .../tpcds_sf100}/rf_prune/query97.out | 0 .../tpcds_sf100}/rf_prune/query98.out | 0 .../tpcds_sf100}/rf_prune/query99.out | 0 .../tpcds_sf100}/shape/query1.out | 0 .../tpcds_sf100}/shape/query10.out | 0 .../tpcds_sf100}/shape/query11.out | 0 .../tpcds_sf100}/shape/query12.out | 0 .../tpcds_sf100}/shape/query13.out | 0 .../tpcds_sf100}/shape/query14.out | 0 .../tpcds_sf100}/shape/query15.out | 0 .../tpcds_sf100}/shape/query16.out | 0 .../tpcds_sf100}/shape/query17.out | 0 .../tpcds_sf100}/shape/query18.out | 0 .../tpcds_sf100}/shape/query19.out | 0 .../tpcds_sf100}/shape/query2.out | 0 
.../tpcds_sf100}/shape/query20.out | 0 .../tpcds_sf100}/shape/query21.out | 0 .../tpcds_sf100}/shape/query22.out | 0 .../tpcds_sf100}/shape/query23.out | 0 .../tpcds_sf100}/shape/query24.out | 0 .../tpcds_sf100}/shape/query25.out | 0 .../tpcds_sf100}/shape/query26.out | 0 .../tpcds_sf100}/shape/query27.out | 0 .../tpcds_sf100}/shape/query28.out | 0 .../tpcds_sf100}/shape/query29.out | 0 .../tpcds_sf100/shape}/query3.out | 0 .../tpcds_sf100}/shape/query30.out | 0 .../tpcds_sf100}/shape/query31.out | 0 .../tpcds_sf100}/shape/query32.out | 0 .../tpcds_sf100}/shape/query33.out | 0 .../tpcds_sf100}/shape/query34.out | 0 .../tpcds_sf100}/shape/query35.out | 0 .../tpcds_sf100}/shape/query36.out | 0 .../tpcds_sf100}/shape/query37.out | 0 .../tpcds_sf100}/shape/query38.out | 0 .../tpcds_sf100}/shape/query39.out | 0 .../tpcds_sf100}/shape/query4.out | 0 .../tpcds_sf100}/shape/query40.out | 0 .../tpcds_sf100}/shape/query41.out | 0 .../tpcds_sf100}/shape/query42.out | 0 .../tpcds_sf100/shape}/query43.out | 0 .../tpcds_sf100}/shape/query44.out | 0 .../tpcds_sf100}/shape/query45.out | 0 .../tpcds_sf100}/shape/query46.out | 0 .../tpcds_sf100}/shape/query47.out | 0 .../tpcds_sf100}/shape/query48.out | 0 .../tpcds_sf100}/shape/query49.out | 0 .../tpcds_sf100}/shape/query5.out | 0 .../tpcds_sf100}/shape/query50.out | 0 .../tpcds_sf100}/shape/query51.out | 0 .../tpcds_sf100}/shape/query52.out | 0 .../tpcds_sf100}/shape/query53.out | 0 .../tpcds_sf100}/shape/query54.out | 0 .../tpcds_sf100}/shape/query55.out | 0 .../tpcds_sf100}/shape/query56.out | 0 .../tpcds_sf100}/shape/query57.out | 0 .../tpcds_sf100}/shape/query58.out | 0 .../tpcds_sf100}/shape/query59.out | 0 .../tpcds_sf100}/shape/query6.out | 0 .../tpcds_sf100}/shape/query60.out | 0 .../tpcds_sf100}/shape/query61.out | 0 .../tpcds_sf100}/shape/query62.out | 0 .../tpcds_sf100}/shape/query63.out | 0 .../tpcds_sf100}/shape/query64.out | 0 .../tpcds_sf100}/shape/query65.out | 0 .../tpcds_sf100}/shape/query66.out | 0 
.../tpcds_sf100}/shape/query67.out | 0 .../tpcds_sf100}/shape/query68.out | 0 .../tpcds_sf100}/shape/query69.out | 0 .../tpcds_sf100}/shape/query7.out | 0 .../tpcds_sf100}/shape/query70.out | 0 .../tpcds_sf100}/shape/query71.out | 0 .../tpcds_sf100}/shape/query72.out | 0 .../tpcds_sf100}/shape/query73.out | 0 .../tpcds_sf100}/shape/query74.out | 0 .../tpcds_sf100}/shape/query75.out | 0 .../tpcds_sf100}/shape/query76.out | 0 .../tpcds_sf100}/shape/query77.out | 0 .../tpcds_sf100}/shape/query78.out | 0 .../tpcds_sf100}/shape/query79.out | 0 .../tpcds_sf100/shape}/query8.out | 0 .../tpcds_sf100}/shape/query80.out | 0 .../tpcds_sf100}/shape/query81.out | 0 .../tpcds_sf100}/shape/query82.out | 0 .../tpcds_sf100}/shape/query83.out | 0 .../tpcds_sf100}/shape/query84.out | 0 .../tpcds_sf100}/shape/query85.out | 0 .../tpcds_sf100}/shape/query86.out | 0 .../tpcds_sf100}/shape/query87.out | 0 .../tpcds_sf100}/shape/query88.out | 0 .../tpcds_sf100}/shape/query89.out | 0 .../tpcds_sf100/shape}/query9.out | 0 .../tpcds_sf100}/shape/query90.out | 0 .../tpcds_sf100}/shape/query91.out | 0 .../tpcds_sf100}/shape/query92.out | 0 .../tpcds_sf100}/shape/query93.out | 0 .../tpcds_sf100}/shape/query94.out | 0 .../tpcds_sf100}/shape/query95.out | 0 .../tpcds_sf100}/shape/query96.out | 0 .../tpcds_sf100}/shape/query97.out | 0 .../tpcds_sf100}/shape/query98.out | 0 .../tpcds_sf100}/shape/query99.out | 0 .../bs_downgrade_shape/query13.out | 0 .../bs_downgrade_shape/query19.out | 0 .../bs_downgrade_shape/query44.out | 0 .../bs_downgrade_shape}/query45.out | 0 .../bs_downgrade_shape/query54.out | 0 .../bs_downgrade_shape}/query56.out | 0 .../bs_downgrade_shape/query6.out | 0 .../bs_downgrade_shape/query61.out | 0 .../bs_downgrade_shape/query68.out | 0 .../bs_downgrade_shape}/query8.out | 0 .../bs_downgrade_shape/query91.out | 0 .../bs_downgrade_shape}/query95.out | 0 .../eliminate_empty/query10_empty.out | 0 .../tpcds_sf1000/hint}/query1.out | 0 .../tpcds_sf1000/hint}/query10.out | 0 
.../tpcds_sf1000/hint}/query11.out | 0 .../tpcds_sf1000/hint}/query12.out | 0 .../tpcds_sf1000/hint}/query13.out | 0 .../tpcds_sf1000/hint}/query14.out | 0 .../tpcds_sf1000/hint}/query15.out | 0 .../tpcds_sf1000/hint}/query16.out | 0 .../tpcds_sf1000/hint}/query17.out | 0 .../tpcds_sf1000/hint}/query18.out | 0 .../tpcds_sf1000/hint}/query19.out | 0 .../tpcds_sf1000/hint}/query2.out | 0 .../tpcds_sf1000/hint}/query20.out | 0 .../tpcds_sf1000/hint}/query21.out | 0 .../tpcds_sf1000/hint}/query22.out | 0 .../tpcds_sf1000/hint}/query23.out | 0 .../tpcds_sf1000/hint}/query24.out | 0 .../tpcds_sf1000/hint}/query25.out | 0 .../tpcds_sf1000/hint}/query26.out | 0 .../tpcds_sf1000/hint}/query27.out | 0 .../tpcds_sf1000/hint}/query28.out | 0 .../tpcds_sf1000/hint}/query29.out | 0 .../tpcds_sf1000/hint}/query3.out | 0 .../tpcds_sf1000/hint}/query30.out | 0 .../tpcds_sf1000/hint}/query31.out | 0 .../tpcds_sf1000/hint}/query32.out | 0 .../tpcds_sf1000/hint}/query33.out | 0 .../tpcds_sf1000/hint}/query34.out | 0 .../tpcds_sf1000/hint}/query35.out | 0 .../tpcds_sf1000/hint}/query36.out | 0 .../tpcds_sf1000/hint}/query37.out | 0 .../tpcds_sf1000/hint}/query38.out | 0 .../tpcds_sf1000/hint}/query39.out | 0 .../tpcds_sf1000/hint}/query4.out | 0 .../tpcds_sf1000/hint}/query40.out | 0 .../tpcds_sf1000/hint}/query41.out | 0 .../tpcds_sf1000/hint}/query42.out | 0 .../tpcds_sf1000/hint}/query43.out | 0 .../tpcds_sf1000/hint}/query44.out | 0 .../tpcds_sf1000/hint}/query45.out | 0 .../tpcds_sf1000/hint}/query46.out | 0 .../tpcds_sf1000/hint}/query47.out | 0 .../tpcds_sf1000/hint}/query48.out | 0 .../tpcds_sf1000/hint}/query49.out | 0 .../tpcds_sf1000/hint}/query5.out | 0 .../tpcds_sf1000/hint}/query50.out | 0 .../tpcds_sf1000/hint}/query51.out | 0 .../tpcds_sf1000/hint}/query52.out | 0 .../tpcds_sf1000/hint}/query53.out | 0 .../tpcds_sf1000/hint}/query54.out | 0 .../tpcds_sf1000/hint}/query55.out | 0 .../tpcds_sf1000/hint}/query56.out | 0 .../tpcds_sf1000/hint}/query57.out | 0 
.../tpcds_sf1000/hint}/query58.out | 0 .../tpcds_sf1000/hint}/query59.out | 0 .../tpcds_sf1000/hint}/query6.out | 0 .../tpcds_sf1000/hint}/query60.out | 0 .../tpcds_sf1000/hint}/query61.out | 0 .../tpcds_sf1000/hint}/query62.out | 0 .../tpcds_sf1000/hint}/query63.out | 0 .../tpcds_sf1000/hint}/query64.out | 0 .../tpcds_sf1000/hint}/query65.out | 0 .../tpcds_sf1000/hint}/query66.out | 0 .../tpcds_sf1000/hint}/query67.out | 0 .../tpcds_sf1000/hint}/query68.out | 0 .../tpcds_sf1000/hint}/query69.out | 0 .../tpcds_sf1000/hint}/query7.out | 0 .../tpcds_sf1000/hint}/query70.out | 0 .../tpcds_sf1000/hint}/query71.out | 0 .../tpcds_sf1000/hint}/query72.out | 0 .../tpcds_sf1000/hint}/query73.out | 0 .../tpcds_sf1000/hint}/query74.out | 0 .../tpcds_sf1000/hint}/query75.out | 0 .../tpcds_sf1000/hint}/query76.out | 0 .../tpcds_sf1000/hint}/query77.out | 0 .../tpcds_sf1000/hint}/query78.out | 0 .../tpcds_sf1000/hint}/query79.out | 0 .../tpcds_sf1000/hint}/query8.out | 0 .../tpcds_sf1000/hint}/query80.out | 0 .../tpcds_sf1000/hint}/query81.out | 0 .../tpcds_sf1000/hint}/query82.out | 0 .../tpcds_sf1000/hint}/query83.out | 0 .../tpcds_sf1000/hint}/query84.out | 0 .../tpcds_sf1000/hint}/query85.out | 0 .../tpcds_sf1000/hint}/query86.out | 0 .../tpcds_sf1000/hint}/query87.out | 0 .../tpcds_sf1000/hint}/query88.out | 0 .../tpcds_sf1000/hint}/query89.out | 0 .../tpcds_sf1000/hint}/query9.out | 0 .../tpcds_sf1000/hint}/query90.out | 0 .../tpcds_sf1000/hint}/query91.out | 0 .../tpcds_sf1000/hint}/query92.out | 0 .../tpcds_sf1000/hint}/query93.out | 0 .../tpcds_sf1000/hint}/query94.out | 0 .../tpcds_sf1000/hint}/query95.out | 0 .../tpcds_sf1000/hint}/query96.out | 0 .../tpcds_sf1000/hint}/query97.out | 0 .../tpcds_sf1000/hint}/query98.out | 0 .../tpcds_sf1000/hint}/query99.out | 0 .../tpcds_sf1000}/shape/query1.out | 0 .../tpcds_sf1000}/shape/query10.out | 0 .../tpcds_sf1000}/shape/query11.out | 0 .../tpcds_sf1000}/shape/query12.out | 0 .../tpcds_sf1000}/shape/query13.out | 0 
.../tpcds_sf1000}/shape/query14.out | 0 .../tpcds_sf1000}/shape/query15.out | 0 .../tpcds_sf1000}/shape/query16.out | 0 .../tpcds_sf1000}/shape/query17.out | 0 .../tpcds_sf1000}/shape/query18.out | 0 .../tpcds_sf1000}/shape/query19.out | 0 .../tpcds_sf1000}/shape/query2.out | 0 .../tpcds_sf1000}/shape/query20.out | 0 .../tpcds_sf1000}/shape/query21.out | 0 .../tpcds_sf1000}/shape/query22.out | 0 .../tpcds_sf1000}/shape/query23.out | 0 .../tpcds_sf1000}/shape/query24.out | 0 .../tpcds_sf1000}/shape/query25.out | 0 .../tpcds_sf1000}/shape/query26.out | 0 .../tpcds_sf1000}/shape/query27.out | 0 .../tpcds_sf1000}/shape/query28.out | 0 .../tpcds_sf1000}/shape/query29.out | 0 .../tpcds_sf1000}/shape/query3.out | 0 .../tpcds_sf1000}/shape/query30.out | 0 .../tpcds_sf1000}/shape/query31.out | 0 .../tpcds_sf1000}/shape/query32.out | 0 .../tpcds_sf1000}/shape/query33.out | 0 .../tpcds_sf1000}/shape/query34.out | 0 .../tpcds_sf1000}/shape/query35.out | 0 .../tpcds_sf1000}/shape/query36.out | 0 .../tpcds_sf1000}/shape/query37.out | 0 .../tpcds_sf1000}/shape/query38.out | 0 .../tpcds_sf1000}/shape/query39.out | 0 .../tpcds_sf1000}/shape/query4.out | 0 .../tpcds_sf1000}/shape/query40.out | 0 .../tpcds_sf1000}/shape/query41.out | 0 .../tpcds_sf1000}/shape/query42.out | 0 .../tpcds_sf1000}/shape/query43.out | 0 .../tpcds_sf1000}/shape/query44.out | 0 .../tpcds_sf1000}/shape/query45.out | 0 .../tpcds_sf1000}/shape/query46.out | 0 .../tpcds_sf1000}/shape/query47.out | 0 .../tpcds_sf1000}/shape/query48.out | 0 .../tpcds_sf1000}/shape/query49.out | 0 .../tpcds_sf1000}/shape/query5.out | 0 .../tpcds_sf1000}/shape/query50.out | 0 .../tpcds_sf1000}/shape/query51.out | 0 .../tpcds_sf1000}/shape/query52.out | 0 .../tpcds_sf1000}/shape/query53.out | 0 .../tpcds_sf1000}/shape/query54.out | 0 .../tpcds_sf1000}/shape/query55.out | 0 .../tpcds_sf1000}/shape/query56.out | 0 .../tpcds_sf1000}/shape/query57.out | 0 .../tpcds_sf1000}/shape/query58.out | 0 .../tpcds_sf1000}/shape/query59.out | 0 
.../tpcds_sf1000}/shape/query6.out | 0 .../tpcds_sf1000}/shape/query60.out | 0 .../tpcds_sf1000}/shape/query61.out | 0 .../tpcds_sf1000}/shape/query62.out | 0 .../tpcds_sf1000}/shape/query63.out | 0 .../tpcds_sf1000}/shape/query64.out | 0 .../tpcds_sf1000}/shape/query65.out | 0 .../tpcds_sf1000}/shape/query66.out | 0 .../tpcds_sf1000}/shape/query67.out | 0 .../tpcds_sf1000}/shape/query68.out | 0 .../tpcds_sf1000}/shape/query69.out | 0 .../tpcds_sf1000}/shape/query7.out | 0 .../tpcds_sf1000}/shape/query70.out | 0 .../tpcds_sf1000}/shape/query71.out | 0 .../tpcds_sf1000}/shape/query72.out | 0 .../tpcds_sf1000}/shape/query73.out | 0 .../tpcds_sf1000}/shape/query74.out | 0 .../tpcds_sf1000}/shape/query75.out | 0 .../tpcds_sf1000}/shape/query76.out | 0 .../tpcds_sf1000}/shape/query77.out | 0 .../tpcds_sf1000}/shape/query78.out | 0 .../tpcds_sf1000}/shape/query79.out | 0 .../tpcds_sf1000}/shape/query8.out | 0 .../tpcds_sf1000}/shape/query80.out | 0 .../tpcds_sf1000}/shape/query81.out | 0 .../tpcds_sf1000}/shape/query82.out | 0 .../tpcds_sf1000}/shape/query83.out | 0 .../tpcds_sf1000}/shape/query84.out | 0 .../tpcds_sf1000}/shape/query85.out | 0 .../tpcds_sf1000}/shape/query86.out | 0 .../tpcds_sf1000}/shape/query87.out | 0 .../tpcds_sf1000}/shape/query88.out | 0 .../tpcds_sf1000}/shape/query89.out | 0 .../tpcds_sf1000}/shape/query9.out | 0 .../tpcds_sf1000}/shape/query90.out | 0 .../tpcds_sf1000}/shape/query91.out | 0 .../tpcds_sf1000}/shape/query92.out | 0 .../tpcds_sf1000}/shape/query93.out | 0 .../tpcds_sf1000}/shape/query94.out | 0 .../tpcds_sf1000}/shape/query95.out | 0 .../tpcds_sf1000}/shape/query96.out | 0 .../tpcds_sf1000}/shape/query97.out | 0 .../tpcds_sf1000}/shape/query98.out | 0 .../tpcds_sf1000}/shape/query99.out | 0 .../tpcds_sf10t_orc}/shape/query1.out | 0 .../tpcds_sf10t_orc}/shape/query10.out | 0 .../tpcds_sf10t_orc}/shape/query11.out | 0 .../tpcds_sf10t_orc}/shape/query12.out | 0 .../tpcds_sf10t_orc}/shape/query13.out | 0 
.../tpcds_sf10t_orc}/shape/query14.out | 0 .../tpcds_sf10t_orc}/shape/query15.out | 0 .../tpcds_sf10t_orc}/shape/query16.out | 0 .../tpcds_sf10t_orc}/shape/query17.out | 0 .../tpcds_sf10t_orc}/shape/query18.out | 0 .../tpcds_sf10t_orc}/shape/query19.out | 0 .../tpcds_sf10t_orc}/shape/query2.out | 0 .../tpcds_sf10t_orc}/shape/query20.out | 0 .../tpcds_sf10t_orc}/shape/query21.out | 0 .../tpcds_sf10t_orc}/shape/query22.out | 0 .../tpcds_sf10t_orc}/shape/query23.out | 0 .../tpcds_sf10t_orc}/shape/query24.out | 0 .../tpcds_sf10t_orc}/shape/query25.out | 0 .../tpcds_sf10t_orc}/shape/query26.out | 0 .../tpcds_sf10t_orc}/shape/query27.out | 0 .../tpcds_sf10t_orc}/shape/query28.out | 0 .../tpcds_sf10t_orc}/shape/query29.out | 0 .../tpcds_sf10t_orc}/shape/query3.out | 0 .../tpcds_sf10t_orc}/shape/query30.out | 0 .../tpcds_sf10t_orc}/shape/query31.out | 0 .../tpcds_sf10t_orc}/shape/query32.out | 0 .../tpcds_sf10t_orc}/shape/query33.out | 0 .../tpcds_sf10t_orc}/shape/query34.out | 0 .../tpcds_sf10t_orc}/shape/query35.out | 0 .../tpcds_sf10t_orc}/shape/query36.out | 0 .../tpcds_sf10t_orc}/shape/query37.out | 0 .../tpcds_sf10t_orc}/shape/query38.out | 0 .../tpcds_sf10t_orc}/shape/query39.out | 0 .../tpcds_sf10t_orc}/shape/query4.out | 0 .../tpcds_sf10t_orc}/shape/query40.out | 0 .../tpcds_sf10t_orc}/shape/query41.out | 0 .../tpcds_sf10t_orc}/shape/query42.out | 0 .../tpcds_sf10t_orc}/shape/query43.out | 0 .../tpcds_sf10t_orc}/shape/query44.out | 0 .../tpcds_sf10t_orc}/shape/query45.out | 0 .../tpcds_sf10t_orc}/shape/query46.out | 0 .../tpcds_sf10t_orc}/shape/query47.out | 0 .../tpcds_sf10t_orc}/shape/query48.out | 0 .../tpcds_sf10t_orc}/shape/query49.out | 0 .../tpcds_sf10t_orc}/shape/query5.out | 0 .../tpcds_sf10t_orc}/shape/query50.out | 0 .../tpcds_sf10t_orc}/shape/query51.out | 0 .../tpcds_sf10t_orc}/shape/query52.out | 0 .../tpcds_sf10t_orc}/shape/query53.out | 0 .../tpcds_sf10t_orc}/shape/query54.out | 0 .../tpcds_sf10t_orc}/shape/query55.out | 0 
.../tpcds_sf10t_orc}/shape/query56.out | 0 .../tpcds_sf10t_orc}/shape/query57.out | 0 .../tpcds_sf10t_orc}/shape/query58.out | 0 .../tpcds_sf10t_orc}/shape/query59.out | 0 .../tpcds_sf10t_orc}/shape/query6.out | 0 .../tpcds_sf10t_orc}/shape/query60.out | 0 .../tpcds_sf10t_orc}/shape/query61.out | 0 .../tpcds_sf10t_orc}/shape/query62.out | 0 .../tpcds_sf10t_orc}/shape/query63.out | 0 .../tpcds_sf10t_orc}/shape/query64.out | 0 .../tpcds_sf10t_orc}/shape/query65.out | 0 .../tpcds_sf10t_orc}/shape/query66.out | 0 .../tpcds_sf10t_orc}/shape/query67.out | 0 .../tpcds_sf10t_orc}/shape/query68.out | 0 .../tpcds_sf10t_orc}/shape/query69.out | 0 .../tpcds_sf10t_orc}/shape/query7.out | 0 .../tpcds_sf10t_orc}/shape/query70.out | 0 .../tpcds_sf10t_orc}/shape/query71.out | 0 .../tpcds_sf10t_orc}/shape/query72.out | 0 .../tpcds_sf10t_orc}/shape/query73.out | 0 .../tpcds_sf10t_orc}/shape/query74.out | 0 .../tpcds_sf10t_orc}/shape/query75.out | 0 .../tpcds_sf10t_orc}/shape/query76.out | 0 .../tpcds_sf10t_orc}/shape/query77.out | 0 .../tpcds_sf10t_orc}/shape/query78.out | 0 .../tpcds_sf10t_orc}/shape/query79.out | 0 .../tpcds_sf10t_orc}/shape/query8.out | 0 .../tpcds_sf10t_orc}/shape/query80.out | 0 .../tpcds_sf10t_orc}/shape/query81.out | 0 .../tpcds_sf10t_orc}/shape/query82.out | 0 .../tpcds_sf10t_orc}/shape/query83.out | 0 .../tpcds_sf10t_orc}/shape/query84.out | 0 .../tpcds_sf10t_orc}/shape/query85.out | 0 .../tpcds_sf10t_orc}/shape/query86.out | 0 .../tpcds_sf10t_orc}/shape/query87.out | 0 .../tpcds_sf10t_orc}/shape/query88.out | 0 .../tpcds_sf10t_orc}/shape/query89.out | 0 .../tpcds_sf10t_orc}/shape/query9.out | 0 .../tpcds_sf10t_orc}/shape/query90.out | 0 .../tpcds_sf10t_orc}/shape/query91.out | 0 .../tpcds_sf10t_orc}/shape/query92.out | 0 .../tpcds_sf10t_orc}/shape/query93.out | 0 .../tpcds_sf10t_orc}/shape/query94.out | 0 .../tpcds_sf10t_orc}/shape/query95.out | 0 .../tpcds_sf10t_orc}/shape/query96.out | 0 .../tpcds_sf10t_orc}/shape/query97.out | 0 
.../tpcds_sf10t_orc}/shape/query98.out | 0 .../tpcds_sf10t_orc}/shape/query99.out | 0 .../tpch_sf1000/hint}/q1.out | 0 .../tpch_sf1000/hint}/q10.out | 0 .../tpch_sf1000/hint}/q11.out | 0 .../tpch_sf1000/hint}/q12.out | 0 .../tpch_sf1000/hint}/q13.out | 0 .../tpch_sf1000/hint}/q14.out | 2 +- .../tpch_sf1000/hint}/q15.out | 9 +- .../tpch_sf1000/hint}/q17.out | 0 .../tpch_sf1000/hint}/q19.out | 0 .../tpch_sf1000/hint}/q3.out | 2 +- .../tpch_sf1000/hint}/q4.out | 0 .../tpch_sf1000/hint}/q5.out | 0 .../tpch_sf1000/hint}/q6.out | 0 .../tpch_sf1000/hint}/q7.out | 2 +- .../tpch_sf1000/hint}/q8.out | 4 +- .../tpch_sf1000/hint}/q9.out | 0 .../tpch_sf1000}/nostats_rf_prune/q1.out | 0 .../tpch_sf1000}/nostats_rf_prune/q10.out | 0 .../tpch_sf1000}/nostats_rf_prune/q11.out | 0 .../tpch_sf1000}/nostats_rf_prune/q12.out | 0 .../tpch_sf1000}/nostats_rf_prune/q13.out | 0 .../tpch_sf1000}/nostats_rf_prune/q14.out | 0 .../tpch_sf1000}/nostats_rf_prune/q15.out | 0 .../tpch_sf1000}/nostats_rf_prune/q16.out | 0 .../tpch_sf1000}/nostats_rf_prune/q17.out | 0 .../tpch_sf1000}/nostats_rf_prune/q18.out | 0 .../tpch_sf1000}/nostats_rf_prune/q19.out | 0 .../tpch_sf1000}/nostats_rf_prune/q2.out | 0 .../nostats_rf_prune/q20-rewrite.out | 0 .../tpch_sf1000}/nostats_rf_prune/q20.out | 0 .../tpch_sf1000}/nostats_rf_prune/q21.out | 0 .../tpch_sf1000}/nostats_rf_prune/q22.out | 0 .../tpch_sf1000}/nostats_rf_prune/q3.out | 0 .../tpch_sf1000}/nostats_rf_prune/q4.out | 0 .../tpch_sf1000}/nostats_rf_prune/q5.out | 0 .../tpch_sf1000}/nostats_rf_prune/q6.out | 0 .../tpch_sf1000}/nostats_rf_prune/q7.out | 0 .../tpch_sf1000}/nostats_rf_prune/q8.out | 0 .../tpch_sf1000}/nostats_rf_prune/q9.out | 0 .../tpch_sf1000}/rf_prune/q1.out | 0 .../tpch_sf1000}/rf_prune/q10.out | 0 .../tpch_sf1000}/rf_prune/q11.out | 0 .../tpch_sf1000}/rf_prune/q12.out | 0 .../tpch_sf1000}/rf_prune/q13.out | 0 .../tpch_sf1000}/rf_prune/q14.out | 0 .../tpch_sf1000}/rf_prune/q15.out | 0 .../tpch_sf1000}/rf_prune/q16.out | 0 
.../tpch_sf1000}/rf_prune/q17.out | 0 .../tpch_sf1000}/rf_prune/q18.out | 0 .../tpch_sf1000}/rf_prune/q19.out | 0 .../tpch_sf1000}/rf_prune/q2.out | 0 .../tpch_sf1000}/rf_prune/q20-rewrite.out | 0 .../tpch_sf1000}/rf_prune/q20.out | 0 .../tpch_sf1000}/rf_prune/q21.out | 0 .../tpch_sf1000}/rf_prune/q22.out | 0 .../tpch_sf1000}/rf_prune/q3.out | 0 .../tpch_sf1000}/rf_prune/q4.out | 0 .../tpch_sf1000}/rf_prune/q5.out | 0 .../tpch_sf1000}/rf_prune/q6.out | 0 .../tpch_sf1000}/rf_prune/q7.out | 0 .../tpch_sf1000}/rf_prune/q8.out | 0 .../tpch_sf1000}/rf_prune/q9.out | 0 .../runtime_filter/test_pushdown_setop.out | 0 .../tpch_sf1000}/shape/q1.out | 0 .../tpch_sf1000}/shape/q10.out | 0 .../tpch_sf1000}/shape/q11.out | 0 .../tpch_sf1000}/shape/q12.out | 0 .../tpch_sf1000}/shape/q13.out | 0 .../tpch_sf1000}/shape/q14.out | 0 .../tpch_sf1000}/shape/q15.out | 0 .../tpch_sf1000}/shape/q16.out | 0 .../tpch_sf1000}/shape/q17.out | 0 .../tpch_sf1000}/shape/q18.out | 0 .../tpch_sf1000}/shape/q19.out | 0 .../tpch_sf1000}/shape/q2.out | 0 .../tpch_sf1000}/shape/q20-rewrite.out | 0 .../tpch_sf1000}/shape/q20.out | 0 .../tpch_sf1000}/shape/q21.out | 0 .../tpch_sf1000}/shape/q22.out | 0 .../tpch_sf1000}/shape/q3.out | 0 .../tpch_sf1000}/shape/q4.out | 0 .../tpch_sf1000}/shape/q5.out | 0 .../tpch_sf1000}/shape/q6.out | 0 .../tpch_sf1000}/shape/q7.out | 0 .../tpch_sf1000}/shape/q8.out | 0 .../tpch_sf1000}/shape/q9.out | 0 .../tpch_sf1000}/shape_no_stats/q1.out | 0 .../tpch_sf1000}/shape_no_stats/q10.out | 0 .../tpch_sf1000}/shape_no_stats/q11.out | 0 .../tpch_sf1000}/shape_no_stats/q12.out | 0 .../tpch_sf1000}/shape_no_stats/q13.out | 0 .../tpch_sf1000}/shape_no_stats/q14.out | 0 .../tpch_sf1000}/shape_no_stats/q15.out | 0 .../tpch_sf1000}/shape_no_stats/q16.out | 0 .../tpch_sf1000}/shape_no_stats/q17.out | 0 .../tpch_sf1000}/shape_no_stats/q18.out | 0 .../tpch_sf1000}/shape_no_stats/q19.out | 0 .../tpch_sf1000}/shape_no_stats/q2.out | 0 .../shape_no_stats/q20-rewrite.out | 0 
.../tpch_sf1000}/shape_no_stats/q20.out | 0 .../tpch_sf1000}/shape_no_stats/q21.out | 0 .../tpch_sf1000}/shape_no_stats/q22.out | 0 .../tpch_sf1000}/shape_no_stats/q3.out | 0 .../tpch_sf1000}/shape_no_stats/q4.out | 0 .../tpch_sf1000}/shape_no_stats/q5.out | 0 .../tpch_sf1000}/shape_no_stats/q6.out | 0 .../tpch_sf1000}/shape_no_stats/q7.out | 0 .../tpch_sf1000}/shape_no_stats/q8.out | 0 .../tpch_sf1000}/shape_no_stats/q9.out | 0 .../new_shapes_p0/clickbench/load.groovy | 149 - .../new_shapes_p0/clickbench/query1.groovy | 34 - .../new_shapes_p0/clickbench/query10.groovy | 35 - .../new_shapes_p0/clickbench/query11.groovy | 36 - .../new_shapes_p0/clickbench/query12.groovy | 36 - .../new_shapes_p0/clickbench/query13.groovy | 36 - .../new_shapes_p0/clickbench/query14.groovy | 36 - .../new_shapes_p0/clickbench/query15.groovy | 36 - .../new_shapes_p0/clickbench/query16.groovy | 36 - .../new_shapes_p0/clickbench/query17.groovy | 36 - .../new_shapes_p0/clickbench/query18.groovy | 36 - .../new_shapes_p0/clickbench/query19.groovy | 36 - .../new_shapes_p0/clickbench/query2.groovy | 36 - .../new_shapes_p0/clickbench/query20.groovy | 37 - .../new_shapes_p0/clickbench/query21.groovy | 36 - .../new_shapes_p0/clickbench/query22.groovy | 36 - .../new_shapes_p0/clickbench/query23.groovy | 36 - .../new_shapes_p0/clickbench/query24.groovy | 36 - .../new_shapes_p0/clickbench/query25.groovy | 36 - .../new_shapes_p0/clickbench/query26.groovy | 36 - .../new_shapes_p0/clickbench/query27.groovy | 36 - .../new_shapes_p0/clickbench/query28.groovy | 36 - .../new_shapes_p0/clickbench/query29.groovy | 36 - .../new_shapes_p0/clickbench/query3.groovy | 34 - .../new_shapes_p0/clickbench/query30.groovy | 36 - .../new_shapes_p0/clickbench/query31.groovy | 36 - .../new_shapes_p0/clickbench/query32.groovy | 36 - .../new_shapes_p0/clickbench/query33.groovy | 36 - .../new_shapes_p0/clickbench/query34.groovy | 36 - .../new_shapes_p0/clickbench/query35.groovy | 36 - 
.../new_shapes_p0/clickbench/query36.groovy | 36 - .../new_shapes_p0/clickbench/query37.groovy | 36 - .../new_shapes_p0/clickbench/query38.groovy | 36 - .../new_shapes_p0/clickbench/query39.groovy | 36 - .../new_shapes_p0/clickbench/query4.groovy | 34 - .../new_shapes_p0/clickbench/query40.groovy | 36 - .../new_shapes_p0/clickbench/query41.groovy | 36 - .../new_shapes_p0/clickbench/query42.groovy | 36 - .../new_shapes_p0/clickbench/query43.groovy | 35 - .../new_shapes_p0/clickbench/query5.groovy | 34 - .../new_shapes_p0/clickbench/query6.groovy | 34 - .../new_shapes_p0/clickbench/query7.groovy | 35 - .../new_shapes_p0/clickbench/query8.groovy | 35 - .../new_shapes_p0/clickbench/query9.groovy | 36 - .../new_shapes_p0/hint_tpcds/ddl/gen_shape.py | 26 - .../new_shapes_p0/hint_tpcds/ddl/shape.tmpl | 43 - .../new_shapes_p0/hint_tpcds/load.groovy | 812 ---- .../hint_tpcds/shape/query1.groovy | 89 - .../hint_tpcds/shape/query24.groovy | 148 - .../hint_tpcds/shape/query64.groovy | 284 -- .../hint_tpcds/shape/query67.groovy | 126 - .../hint_tpcds/shape/query72.groovy | 96 - .../hint_tpcds/shape/query78.groovy | 159 - .../new_shapes_p0/hint_tpch/load.groovy | 226 - .../new_shapes_p0/hint_tpch/shape/q10.groovy | 76 - .../new_shapes_p0/hint_tpch/shape/q11.groovy | 72 - .../new_shapes_p0/hint_tpch/shape/q12.groovy | 72 - .../new_shapes_p0/hint_tpch/shape/q13.groovy | 61 - .../new_shapes_p0/hint_tpch/shape/q14.groovy | 54 - .../new_shapes_p0/hint_tpch/shape/q15.groovy | 65 - .../new_shapes_p0/hint_tpch/shape/q17.groovy | 63 - .../new_shapes_p0/hint_tpch/shape/q19.groovy | 81 - .../new_shapes_p0/hint_tpch/shape/q3.groovy | 68 - .../new_shapes_p0/hint_tpch/shape/q4.groovy | 67 - .../new_shapes_p0/hint_tpch/shape/q5.groovy | 64 - .../new_shapes_p0/hint_tpch/shape/q7.groovy | 79 - .../new_shapes_p0/hint_tpch/shape/q8.groovy | 83 - .../new_shapes_p0/hint_tpch/shape/q9.groovy | 73 - .../new_shapes_p0/ssb_sf100/load.groovy | 218 - .../new_shapes_p0/ssb_sf100/shape/flat.groovy | 110 - 
.../new_shapes_p0/ssb_sf100/shape/q1.1.groovy | 50 - .../new_shapes_p0/ssb_sf100/shape/q1.2.groovy | 50 - .../new_shapes_p0/ssb_sf100/shape/q1.3.groovy | 52 - .../new_shapes_p0/ssb_sf100/shape/q2.1.groovy | 53 - .../new_shapes_p0/ssb_sf100/shape/q2.2.groovy | 53 - .../new_shapes_p0/ssb_sf100/shape/q2.3.groovy | 53 - .../new_shapes_p0/ssb_sf100/shape/q3.1.groovy | 59 - .../new_shapes_p0/ssb_sf100/shape/q3.2.groovy | 59 - .../new_shapes_p0/ssb_sf100/shape/q3.3.groovy | 65 - .../new_shapes_p0/ssb_sf100/shape/q3.4.groovy | 64 - .../new_shapes_p0/ssb_sf100/shape/q4.1.groovy | 61 - .../new_shapes_p0/ssb_sf100/shape/q4.2.groovy | 66 - .../new_shapes_p0/ssb_sf100/shape/q4.3.groovy | 62 - .../tpcds_sf100/constraints/load.groovy | 2552 ---------- .../tpcds_sf100/constraints/query23.groovy | 98 - .../tpcds_sf100/ddl/gen_rf_prune.py | 26 - .../tpcds_sf100/ddl/gen_shape.py | 26 - .../tpcds_sf100/ddl/rf_prune.tmpl | 43 - .../new_shapes_p0/tpcds_sf100/ddl/shape.tmpl | 42 - .../new_shapes_p0/tpcds_sf100/load.groovy | 4127 ----------------- .../tpcds_sf100/noStatsRfPrune/query1.groovy | 66 - .../tpcds_sf100/noStatsRfPrune/query10.groovy | 105 - .../tpcds_sf100/noStatsRfPrune/query11.groovy | 127 - .../tpcds_sf100/noStatsRfPrune/query12.groovy | 80 - .../tpcds_sf100/noStatsRfPrune/query13.groovy | 96 - .../tpcds_sf100/noStatsRfPrune/query14.groovy | 148 - .../tpcds_sf100/noStatsRfPrune/query15.groovy | 65 - .../tpcds_sf100/noStatsRfPrune/query16.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query17.groovy | 91 - .../tpcds_sf100/noStatsRfPrune/query18.groovy | 80 - .../tpcds_sf100/noStatsRfPrune/query19.groovy | 71 - .../tpcds_sf100/noStatsRfPrune/query2.groovy | 107 - .../tpcds_sf100/noStatsRfPrune/query20.groovy | 76 - .../tpcds_sf100/noStatsRfPrune/query21.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query22.groovy | 66 - .../tpcds_sf100/noStatsRfPrune/query23.groovy | 101 - .../tpcds_sf100/noStatsRfPrune/query24.groovy | 100 - .../tpcds_sf100/noStatsRfPrune/query25.groovy | 93 - 
.../tpcds_sf100/noStatsRfPrune/query26.groovy | 67 - .../tpcds_sf100/noStatsRfPrune/query27.groovy | 68 - .../tpcds_sf100/noStatsRfPrune/query28.groovy | 99 - .../tpcds_sf100/noStatsRfPrune/query29.groovy | 93 - .../tpcds_sf100/noStatsRfPrune/query3.groovy | 68 - .../tpcds_sf100/noStatsRfPrune/query30.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query31.groovy | 98 - .../tpcds_sf100/noStatsRfPrune/query32.groovy | 78 - .../tpcds_sf100/noStatsRfPrune/query33.groovy | 121 - .../tpcds_sf100/noStatsRfPrune/query34.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query35.groovy | 104 - .../tpcds_sf100/noStatsRfPrune/query36.groovy | 76 - .../tpcds_sf100/noStatsRfPrune/query37.groovy | 63 - .../tpcds_sf100/noStatsRfPrune/query38.groovy | 73 - .../tpcds_sf100/noStatsRfPrune/query39.groovy | 74 - .../tpcds_sf100/noStatsRfPrune/query4.groovy | 160 - .../tpcds_sf100/noStatsRfPrune/query40.groovy | 73 - .../tpcds_sf100/noStatsRfPrune/query41.groovy | 98 - .../tpcds_sf100/noStatsRfPrune/query42.groovy | 68 - .../tpcds_sf100/noStatsRfPrune/query43.groovy | 65 - .../tpcds_sf100/noStatsRfPrune/query44.groovy | 81 - .../tpcds_sf100/noStatsRfPrune/query45.groovy | 66 - .../tpcds_sf100/noStatsRfPrune/query46.groovy | 81 - .../tpcds_sf100/noStatsRfPrune/query47.groovy | 95 - .../tpcds_sf100/noStatsRfPrune/query48.groovy | 113 - .../tpcds_sf100/noStatsRfPrune/query49.groovy | 175 - .../tpcds_sf100/noStatsRfPrune/query5.groovy | 174 - .../tpcds_sf100/noStatsRfPrune/query50.groovy | 105 - .../tpcds_sf100/noStatsRfPrune/query51.groovy | 91 - .../tpcds_sf100/noStatsRfPrune/query52.groovy | 68 - .../tpcds_sf100/noStatsRfPrune/query53.groovy | 74 - .../tpcds_sf100/noStatsRfPrune/query54.groovy | 102 - .../tpcds_sf100/noStatsRfPrune/query55.groovy | 60 - .../tpcds_sf100/noStatsRfPrune/query56.groovy | 115 - .../tpcds_sf100/noStatsRfPrune/query57.groovy | 93 - .../tpcds_sf100/noStatsRfPrune/query58.groovy | 111 - .../tpcds_sf100/noStatsRfPrune/query59.groovy | 89 - 
.../tpcds_sf100/noStatsRfPrune/query6.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query60.groovy | 124 - .../tpcds_sf100/noStatsRfPrune/query61.groovy | 90 - .../tpcds_sf100/noStatsRfPrune/query62.groovy | 81 - .../tpcds_sf100/noStatsRfPrune/query63.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query64.groovy | 165 - .../tpcds_sf100/noStatsRfPrune/query65.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query66.groovy | 266 -- .../tpcds_sf100/noStatsRfPrune/query67.groovy | 90 - .../tpcds_sf100/noStatsRfPrune/query68.groovy | 88 - .../tpcds_sf100/noStatsRfPrune/query69.groovy | 93 - .../tpcds_sf100/noStatsRfPrune/query7.groovy | 67 - .../tpcds_sf100/noStatsRfPrune/query70.groovy | 84 - .../tpcds_sf100/noStatsRfPrune/query71.groovy | 86 - .../tpcds_sf100/noStatsRfPrune/query72.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query73.groovy | 74 - .../tpcds_sf100/noStatsRfPrune/query74.groovy | 106 - .../tpcds_sf100/noStatsRfPrune/query75.groovy | 116 - .../tpcds_sf100/noStatsRfPrune/query76.groovy | 70 - .../tpcds_sf100/noStatsRfPrune/query77.groovy | 154 - .../tpcds_sf100/noStatsRfPrune/query78.groovy | 104 - .../tpcds_sf100/noStatsRfPrune/query79.groovy | 69 - .../tpcds_sf100/noStatsRfPrune/query8.groovy | 154 - .../tpcds_sf100/noStatsRfPrune/query80.groovy | 142 - .../tpcds_sf100/noStatsRfPrune/query81.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query82.groovy | 63 - .../tpcds_sf100/noStatsRfPrune/query83.groovy | 113 - .../tpcds_sf100/noStatsRfPrune/query84.groovy | 67 - .../tpcds_sf100/noStatsRfPrune/query85.groovy | 130 - .../tpcds_sf100/noStatsRfPrune/query86.groovy | 72 - .../tpcds_sf100/noStatsRfPrune/query87.groovy | 69 - .../tpcds_sf100/noStatsRfPrune/query88.groovy | 140 - .../tpcds_sf100/noStatsRfPrune/query89.groovy | 74 - .../tpcds_sf100/noStatsRfPrune/query9.groovy | 98 - .../tpcds_sf100/noStatsRfPrune/query90.groovy | 68 - .../tpcds_sf100/noStatsRfPrune/query91.groovy | 77 - .../tpcds_sf100/noStatsRfPrune/query92.groovy | 76 - 
.../tpcds_sf100/noStatsRfPrune/query93.groovy | 64 - .../tpcds_sf100/noStatsRfPrune/query94.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query95.groovy | 76 - .../tpcds_sf100/noStatsRfPrune/query96.groovy | 62 - .../tpcds_sf100/noStatsRfPrune/query97.groovy | 75 - .../tpcds_sf100/noStatsRfPrune/query98.groovy | 79 - .../tpcds_sf100/noStatsRfPrune/query99.groovy | 81 - .../tpcds_sf100/no_stats_shape/query1.groovy | 66 - .../tpcds_sf100/no_stats_shape/query10.groovy | 105 - .../tpcds_sf100/no_stats_shape/query11.groovy | 127 - .../tpcds_sf100/no_stats_shape/query12.groovy | 80 - .../tpcds_sf100/no_stats_shape/query13.groovy | 96 - .../tpcds_sf100/no_stats_shape/query14.groovy | 148 - .../tpcds_sf100/no_stats_shape/query15.groovy | 65 - .../tpcds_sf100/no_stats_shape/query16.groovy | 77 - .../tpcds_sf100/no_stats_shape/query17.groovy | 91 - .../tpcds_sf100/no_stats_shape/query18.groovy | 80 - .../tpcds_sf100/no_stats_shape/query19.groovy | 71 - .../tpcds_sf100/no_stats_shape/query2.groovy | 107 - .../tpcds_sf100/no_stats_shape/query20.groovy | 76 - .../tpcds_sf100/no_stats_shape/query21.groovy | 77 - .../tpcds_sf100/no_stats_shape/query22.groovy | 66 - .../tpcds_sf100/no_stats_shape/query23.groovy | 101 - .../tpcds_sf100/no_stats_shape/query24.groovy | 100 - .../tpcds_sf100/no_stats_shape/query25.groovy | 93 - .../tpcds_sf100/no_stats_shape/query26.groovy | 67 - .../tpcds_sf100/no_stats_shape/query27.groovy | 68 - .../tpcds_sf100/no_stats_shape/query28.groovy | 99 - .../tpcds_sf100/no_stats_shape/query29.groovy | 93 - .../tpcds_sf100/no_stats_shape/query3.groovy | 68 - .../tpcds_sf100/no_stats_shape/query30.groovy | 77 - .../tpcds_sf100/no_stats_shape/query31.groovy | 98 - .../tpcds_sf100/no_stats_shape/query32.groovy | 78 - .../tpcds_sf100/no_stats_shape/query33.groovy | 121 - .../tpcds_sf100/no_stats_shape/query34.groovy | 77 - .../tpcds_sf100/no_stats_shape/query35.groovy | 104 - .../tpcds_sf100/no_stats_shape/query36.groovy | 76 - 
.../tpcds_sf100/no_stats_shape/query37.groovy | 63 - .../tpcds_sf100/no_stats_shape/query38.groovy | 73 - .../tpcds_sf100/no_stats_shape/query39.groovy | 74 - .../tpcds_sf100/no_stats_shape/query4.groovy | 160 - .../tpcds_sf100/no_stats_shape/query40.groovy | 73 - .../tpcds_sf100/no_stats_shape/query41.groovy | 98 - .../tpcds_sf100/no_stats_shape/query42.groovy | 68 - .../tpcds_sf100/no_stats_shape/query43.groovy | 65 - .../tpcds_sf100/no_stats_shape/query44.groovy | 81 - .../tpcds_sf100/no_stats_shape/query45.groovy | 66 - .../tpcds_sf100/no_stats_shape/query46.groovy | 81 - .../tpcds_sf100/no_stats_shape/query47.groovy | 95 - .../tpcds_sf100/no_stats_shape/query48.groovy | 113 - .../tpcds_sf100/no_stats_shape/query49.groovy | 175 - .../tpcds_sf100/no_stats_shape/query5.groovy | 174 - .../tpcds_sf100/no_stats_shape/query50.groovy | 105 - .../tpcds_sf100/no_stats_shape/query51.groovy | 91 - .../tpcds_sf100/no_stats_shape/query52.groovy | 68 - .../tpcds_sf100/no_stats_shape/query53.groovy | 74 - .../tpcds_sf100/no_stats_shape/query54.groovy | 102 - .../tpcds_sf100/no_stats_shape/query55.groovy | 60 - .../tpcds_sf100/no_stats_shape/query56.groovy | 115 - .../tpcds_sf100/no_stats_shape/query57.groovy | 93 - .../tpcds_sf100/no_stats_shape/query58.groovy | 111 - .../tpcds_sf100/no_stats_shape/query59.groovy | 89 - .../tpcds_sf100/no_stats_shape/query6.groovy | 75 - .../tpcds_sf100/no_stats_shape/query60.groovy | 124 - .../tpcds_sf100/no_stats_shape/query61.groovy | 90 - .../tpcds_sf100/no_stats_shape/query62.groovy | 81 - .../tpcds_sf100/no_stats_shape/query63.groovy | 75 - .../tpcds_sf100/no_stats_shape/query64.groovy | 165 - .../tpcds_sf100/no_stats_shape/query65.groovy | 75 - .../tpcds_sf100/no_stats_shape/query66.groovy | 266 -- .../tpcds_sf100/no_stats_shape/query67.groovy | 90 - .../tpcds_sf100/no_stats_shape/query68.groovy | 88 - .../tpcds_sf100/no_stats_shape/query69.groovy | 93 - .../tpcds_sf100/no_stats_shape/query7.groovy | 67 - 
.../tpcds_sf100/no_stats_shape/query70.groovy | 84 - .../tpcds_sf100/no_stats_shape/query71.groovy | 86 - .../tpcds_sf100/no_stats_shape/query72.groovy | 75 - .../tpcds_sf100/no_stats_shape/query73.groovy | 74 - .../tpcds_sf100/no_stats_shape/query74.groovy | 106 - .../tpcds_sf100/no_stats_shape/query75.groovy | 116 - .../tpcds_sf100/no_stats_shape/query76.groovy | 70 - .../tpcds_sf100/no_stats_shape/query77.groovy | 154 - .../tpcds_sf100/no_stats_shape/query78.groovy | 104 - .../tpcds_sf100/no_stats_shape/query79.groovy | 69 - .../tpcds_sf100/no_stats_shape/query8.groovy | 154 - .../tpcds_sf100/no_stats_shape/query80.groovy | 142 - .../tpcds_sf100/no_stats_shape/query81.groovy | 77 - .../tpcds_sf100/no_stats_shape/query82.groovy | 63 - .../tpcds_sf100/no_stats_shape/query83.groovy | 113 - .../tpcds_sf100/no_stats_shape/query84.groovy | 67 - .../tpcds_sf100/no_stats_shape/query85.groovy | 130 - .../tpcds_sf100/no_stats_shape/query86.groovy | 72 - .../tpcds_sf100/no_stats_shape/query87.groovy | 69 - .../tpcds_sf100/no_stats_shape/query88.groovy | 140 - .../tpcds_sf100/no_stats_shape/query89.groovy | 74 - .../tpcds_sf100/no_stats_shape/query9.groovy | 98 - .../tpcds_sf100/no_stats_shape/query90.groovy | 68 - .../tpcds_sf100/no_stats_shape/query91.groovy | 77 - .../tpcds_sf100/no_stats_shape/query92.groovy | 76 - .../tpcds_sf100/no_stats_shape/query93.groovy | 64 - .../tpcds_sf100/no_stats_shape/query94.groovy | 75 - .../tpcds_sf100/no_stats_shape/query95.groovy | 76 - .../tpcds_sf100/no_stats_shape/query96.groovy | 62 - .../tpcds_sf100/no_stats_shape/query97.groovy | 75 - .../tpcds_sf100/no_stats_shape/query98.groovy | 79 - .../tpcds_sf100/no_stats_shape/query99.groovy | 81 - .../tpcds_sf100/rf_prune/query1.groovy | 66 - .../tpcds_sf100/rf_prune/query10.groovy | 100 - .../tpcds_sf100/rf_prune/query11.groovy | 122 - .../tpcds_sf100/rf_prune/query12.groovy | 75 - .../tpcds_sf100/rf_prune/query13.groovy | 93 - .../tpcds_sf100/rf_prune/query14.groovy | 145 - 
.../tpcds_sf100/rf_prune/query15.groovy | 61 - .../tpcds_sf100/rf_prune/query16.groovy | 72 - .../tpcds_sf100/rf_prune/query17.groovy | 86 - .../tpcds_sf100/rf_prune/query18.groovy | 75 - .../tpcds_sf100/rf_prune/query19.groovy | 66 - .../tpcds_sf100/rf_prune/query2.groovy | 101 - .../tpcds_sf100/rf_prune/query20.groovy | 71 - .../tpcds_sf100/rf_prune/query21.groovy | 72 - .../tpcds_sf100/rf_prune/query22.groovy | 61 - .../tpcds_sf100/rf_prune/query23.groovy | 99 - .../tpcds_sf100/rf_prune/query24.groovy | 96 - .../tpcds_sf100/rf_prune/query25.groovy | 89 - .../tpcds_sf100/rf_prune/query26.groovy | 62 - .../tpcds_sf100/rf_prune/query27.groovy | 64 - .../tpcds_sf100/rf_prune/query28.groovy | 94 - .../tpcds_sf100/rf_prune/query29.groovy | 88 - .../tpcds_sf100/rf_prune/query3.groovy | 62 - .../tpcds_sf100/rf_prune/query30.groovy | 72 - .../tpcds_sf100/rf_prune/query31.groovy | 93 - .../tpcds_sf100/rf_prune/query32.groovy | 75 - .../tpcds_sf100/rf_prune/query33.groovy | 116 - .../tpcds_sf100/rf_prune/query34.groovy | 72 - .../tpcds_sf100/rf_prune/query35.groovy | 99 - .../tpcds_sf100/rf_prune/query36.groovy | 71 - .../tpcds_sf100/rf_prune/query37.groovy | 57 - .../tpcds_sf100/rf_prune/query38.groovy | 69 - .../tpcds_sf100/rf_prune/query39.groovy | 67 - .../tpcds_sf100/rf_prune/query4.groovy | 157 - .../tpcds_sf100/rf_prune/query40.groovy | 68 - .../tpcds_sf100/rf_prune/query41.groovy | 92 - .../tpcds_sf100/rf_prune/query42.groovy | 62 - .../tpcds_sf100/rf_prune/query43.groovy | 59 - .../tpcds_sf100/rf_prune/query44.groovy | 75 - .../tpcds_sf100/rf_prune/query45.groovy | 60 - .../tpcds_sf100/rf_prune/query46.groovy | 75 - .../tpcds_sf100/rf_prune/query47.groovy | 91 - .../tpcds_sf100/rf_prune/query48.groovy | 107 - .../tpcds_sf100/rf_prune/query49.groovy | 169 - .../tpcds_sf100/rf_prune/query5.groovy | 169 - .../tpcds_sf100/rf_prune/query50.groovy | 99 - .../tpcds_sf100/rf_prune/query51.groovy | 85 - .../tpcds_sf100/rf_prune/query52.groovy | 62 - 
.../tpcds_sf100/rf_prune/query53.groovy | 68 - .../tpcds_sf100/rf_prune/query54.groovy | 96 - .../tpcds_sf100/rf_prune/query55.groovy | 54 - .../tpcds_sf100/rf_prune/query56.groovy | 109 - .../tpcds_sf100/rf_prune/query57.groovy | 88 - .../tpcds_sf100/rf_prune/query58.groovy | 105 - .../tpcds_sf100/rf_prune/query59.groovy | 84 - .../tpcds_sf100/rf_prune/query6.groovy | 67 - .../tpcds_sf100/rf_prune/query60.groovy | 118 - .../tpcds_sf100/rf_prune/query61.groovy | 84 - .../tpcds_sf100/rf_prune/query62.groovy | 75 - .../tpcds_sf100/rf_prune/query63.groovy | 69 - .../tpcds_sf100/rf_prune/query64.groovy | 161 - .../tpcds_sf100/rf_prune/query65.groovy | 69 - .../tpcds_sf100/rf_prune/query66.groovy | 260 -- .../tpcds_sf100/rf_prune/query67.groovy | 84 - .../tpcds_sf100/rf_prune/query68.groovy | 82 - .../tpcds_sf100/rf_prune/query69.groovy | 87 - .../tpcds_sf100/rf_prune/query7.groovy | 62 - .../tpcds_sf100/rf_prune/query70.groovy | 78 - .../tpcds_sf100/rf_prune/query71.groovy | 80 - .../tpcds_sf100/rf_prune/query72.groovy | 69 - .../tpcds_sf100/rf_prune/query73.groovy | 68 - .../tpcds_sf100/rf_prune/query74.groovy | 101 - .../tpcds_sf100/rf_prune/query75.groovy | 110 - .../tpcds_sf100/rf_prune/query76.groovy | 64 - .../tpcds_sf100/rf_prune/query77.groovy | 148 - .../tpcds_sf100/rf_prune/query78.groovy | 98 - .../tpcds_sf100/rf_prune/query79.groovy | 63 - .../tpcds_sf100/rf_prune/query8.groovy | 149 - .../tpcds_sf100/rf_prune/query80.groovy | 136 - .../tpcds_sf100/rf_prune/query81.groovy | 71 - .../tpcds_sf100/rf_prune/query82.groovy | 57 - .../tpcds_sf100/rf_prune/query83.groovy | 107 - .../tpcds_sf100/rf_prune/query84.groovy | 61 - .../tpcds_sf100/rf_prune/query85.groovy | 124 - .../tpcds_sf100/rf_prune/query86.groovy | 66 - .../tpcds_sf100/rf_prune/query87.groovy | 63 - .../tpcds_sf100/rf_prune/query88.groovy | 134 - .../tpcds_sf100/rf_prune/query89.groovy | 68 - .../tpcds_sf100/rf_prune/query9.groovy | 92 - .../tpcds_sf100/rf_prune/query90.groovy | 62 - 
.../tpcds_sf100/rf_prune/query91.groovy | 71 - .../tpcds_sf100/rf_prune/query92.groovy | 70 - .../tpcds_sf100/rf_prune/query93.groovy | 58 - .../tpcds_sf100/rf_prune/query94.groovy | 69 - .../tpcds_sf100/rf_prune/query95.groovy | 72 - .../tpcds_sf100/rf_prune/query96.groovy | 56 - .../tpcds_sf100/rf_prune/query97.groovy | 71 - .../tpcds_sf100/rf_prune/query98.groovy | 73 - .../tpcds_sf100/rf_prune/query99.groovy | 75 - .../tpcds_sf100/shape/query1.groovy | 65 - .../tpcds_sf100/shape/query10.groovy | 99 - .../tpcds_sf100/shape/query11.groovy | 121 - .../tpcds_sf100/shape/query12.groovy | 74 - .../tpcds_sf100/shape/query13.groovy | 92 - .../tpcds_sf100/shape/query14.groovy | 144 - .../tpcds_sf100/shape/query15.groovy | 60 - .../tpcds_sf100/shape/query16.groovy | 71 - .../tpcds_sf100/shape/query17.groovy | 85 - .../tpcds_sf100/shape/query18.groovy | 74 - .../tpcds_sf100/shape/query19.groovy | 65 - .../tpcds_sf100/shape/query2.groovy | 100 - .../tpcds_sf100/shape/query20.groovy | 70 - .../tpcds_sf100/shape/query21.groovy | 71 - .../tpcds_sf100/shape/query22.groovy | 60 - .../tpcds_sf100/shape/query23.groovy | 98 - .../tpcds_sf100/shape/query24.groovy | 95 - .../tpcds_sf100/shape/query25.groovy | 88 - .../tpcds_sf100/shape/query26.groovy | 61 - .../tpcds_sf100/shape/query27.groovy | 63 - .../tpcds_sf100/shape/query28.groovy | 93 - .../tpcds_sf100/shape/query29.groovy | 87 - .../tpcds_sf100/shape/query3.groovy | 61 - .../tpcds_sf100/shape/query30.groovy | 71 - .../tpcds_sf100/shape/query31.groovy | 92 - .../tpcds_sf100/shape/query32.groovy | 74 - .../tpcds_sf100/shape/query33.groovy | 115 - .../tpcds_sf100/shape/query34.groovy | 71 - .../tpcds_sf100/shape/query35.groovy | 98 - .../tpcds_sf100/shape/query36.groovy | 70 - .../tpcds_sf100/shape/query37.groovy | 57 - .../tpcds_sf100/shape/query38.groovy | 69 - .../tpcds_sf100/shape/query39.groovy | 67 - .../tpcds_sf100/shape/query4.groovy | 156 - .../tpcds_sf100/shape/query40.groovy | 68 - 
.../tpcds_sf100/shape/query41.groovy | 92 - .../tpcds_sf100/shape/query42.groovy | 62 - .../tpcds_sf100/shape/query43.groovy | 59 - .../tpcds_sf100/shape/query44.groovy | 75 - .../tpcds_sf100/shape/query45.groovy | 60 - .../tpcds_sf100/shape/query46.groovy | 75 - .../tpcds_sf100/shape/query47.groovy | 91 - .../tpcds_sf100/shape/query48.groovy | 107 - .../tpcds_sf100/shape/query49.groovy | 169 - .../tpcds_sf100/shape/query5.groovy | 168 - .../tpcds_sf100/shape/query50.groovy | 99 - .../tpcds_sf100/shape/query51.groovy | 85 - .../tpcds_sf100/shape/query52.groovy | 62 - .../tpcds_sf100/shape/query53.groovy | 68 - .../tpcds_sf100/shape/query54.groovy | 96 - .../tpcds_sf100/shape/query55.groovy | 54 - .../tpcds_sf100/shape/query56.groovy | 109 - .../tpcds_sf100/shape/query57.groovy | 88 - .../tpcds_sf100/shape/query58.groovy | 105 - .../tpcds_sf100/shape/query59.groovy | 84 - .../tpcds_sf100/shape/query6.groovy | 66 - .../tpcds_sf100/shape/query60.groovy | 118 - .../tpcds_sf100/shape/query61.groovy | 84 - .../tpcds_sf100/shape/query62.groovy | 75 - .../tpcds_sf100/shape/query63.groovy | 69 - .../tpcds_sf100/shape/query64.groovy | 161 - .../tpcds_sf100/shape/query65.groovy | 69 - .../tpcds_sf100/shape/query66.groovy | 260 -- .../tpcds_sf100/shape/query67.groovy | 84 - .../tpcds_sf100/shape/query68.groovy | 82 - .../tpcds_sf100/shape/query69.groovy | 87 - .../tpcds_sf100/shape/query7.groovy | 61 - .../tpcds_sf100/shape/query70.groovy | 78 - .../tpcds_sf100/shape/query71.groovy | 80 - .../tpcds_sf100/shape/query72.groovy | 69 - .../tpcds_sf100/shape/query73.groovy | 68 - .../tpcds_sf100/shape/query74.groovy | 101 - .../tpcds_sf100/shape/query75.groovy | 110 - .../tpcds_sf100/shape/query76.groovy | 64 - .../tpcds_sf100/shape/query77.groovy | 148 - .../tpcds_sf100/shape/query78.groovy | 98 - .../tpcds_sf100/shape/query79.groovy | 63 - .../tpcds_sf100/shape/query8.groovy | 148 - .../tpcds_sf100/shape/query80.groovy | 136 - .../tpcds_sf100/shape/query81.groovy | 71 - 
.../tpcds_sf100/shape/query82.groovy | 57 - .../tpcds_sf100/shape/query83.groovy | 107 - .../tpcds_sf100/shape/query84.groovy | 61 - .../tpcds_sf100/shape/query85.groovy | 124 - .../tpcds_sf100/shape/query86.groovy | 66 - .../tpcds_sf100/shape/query87.groovy | 63 - .../tpcds_sf100/shape/query88.groovy | 134 - .../tpcds_sf100/shape/query89.groovy | 68 - .../tpcds_sf100/shape/query9.groovy | 92 - .../tpcds_sf100/shape/query90.groovy | 62 - .../tpcds_sf100/shape/query91.groovy | 71 - .../tpcds_sf100/shape/query92.groovy | 70 - .../tpcds_sf100/shape/query93.groovy | 58 - .../tpcds_sf100/shape/query94.groovy | 69 - .../tpcds_sf100/shape/query95.groovy | 72 - .../tpcds_sf100/shape/query96.groovy | 56 - .../tpcds_sf100/shape/query97.groovy | 71 - .../tpcds_sf100/shape/query98.groovy | 73 - .../tpcds_sf100/shape/query99.groovy | 75 - .../shape/tpcds_sf100_stats.groovy | 77 - .../bs_downgrade_shape/query13.groovy | 140 - .../bs_downgrade_shape/query19.groovy | 86 - .../bs_downgrade_shape/query44.groovy | 106 - .../bs_downgrade_shape/query45.groovy | 76 - .../bs_downgrade_shape/query54.groovy | 148 - .../bs_downgrade_shape/query56.groovy | 174 - .../bs_downgrade_shape/query6.groovy | 88 - .../bs_downgrade_shape/query61.groovy | 124 - .../bs_downgrade_shape/query68.groovy | 120 - .../bs_downgrade_shape/query8.groovy | 252 - .../bs_downgrade_shape/query91.groovy | 98 - .../bs_downgrade_shape/query95.groovy | 100 - .../tpcds_sf1000/ddl/gen_shape.py | 26 - .../new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl | 43 - .../eliminate_empty/query10_empty.groovy | 101 - .../new_shapes_p0/tpcds_sf1000/load.groovy | 2548 ---------- .../tpcds_sf1000/shape/query1.groovy | 87 - .../tpcds_sf1000/shape/query10.groovy | 155 - .../tpcds_sf1000/shape/query11.groovy | 199 - .../tpcds_sf1000/shape/query12.groovy | 105 - .../tpcds_sf1000/shape/query13.groovy | 141 - .../tpcds_sf1000/shape/query14.groovy | 245 - .../tpcds_sf1000/shape/query15.groovy | 77 - .../tpcds_sf1000/shape/query16.groovy | 99 - 
.../tpcds_sf1000/shape/query17.groovy | 127 - .../tpcds_sf1000/shape/query18.groovy | 105 - .../tpcds_sf1000/shape/query19.groovy | 87 - .../tpcds_sf1000/shape/query2.groovy | 157 - .../tpcds_sf1000/shape/query20.groovy | 97 - .../tpcds_sf1000/shape/query21.groovy | 98 - .../tpcds_sf1000/shape/query22.groovy | 77 - .../tpcds_sf1000/shape/query23.groovy | 146 - .../tpcds_sf1000/shape/query24.groovy | 147 - .../tpcds_sf1000/shape/query25.groovy | 133 - .../tpcds_sf1000/shape/query26.groovy | 79 - .../tpcds_sf1000/shape/query27.groovy | 83 - .../tpcds_sf1000/shape/query28.groovy | 143 - .../tpcds_sf1000/shape/query29.groovy | 131 - .../tpcds_sf1000/shape/query3.groovy | 79 - .../tpcds_sf1000/shape/query30.groovy | 99 - .../tpcds_sf1000/shape/query31.groovy | 141 - .../tpcds_sf1000/shape/query32.groovy | 98 - .../tpcds_sf1000/shape/query33.groovy | 187 - .../tpcds_sf1000/shape/query34.groovy | 99 - .../tpcds_sf1000/shape/query35.groovy | 153 - .../tpcds_sf1000/shape/query36.groovy | 97 - .../tpcds_sf1000/shape/query37.groovy | 71 - .../tpcds_sf1000/shape/query38.groovy | 88 - .../tpcds_sf1000/shape/query39.groovy | 91 - .../tpcds_sf1000/shape/query4.groovy | 269 -- .../tpcds_sf1000/shape/query40.groovy | 93 - .../tpcds_sf1000/shape/query41.groovy | 141 - .../tpcds_sf1000/shape/query42.groovy | 81 - .../tpcds_sf1000/shape/query43.groovy | 75 - .../tpcds_sf1000/shape/query44.groovy | 107 - .../tpcds_sf1000/shape/query45.groovy | 77 - .../tpcds_sf1000/shape/query46.groovy | 107 - .../tpcds_sf1000/shape/query47.groovy | 139 - .../tpcds_sf1000/shape/query48.groovy | 171 - .../tpcds_sf1000/shape/query49.groovy | 295 -- .../tpcds_sf1000/shape/query5.groovy | 293 -- .../tpcds_sf1000/shape/query50.groovy | 155 - .../tpcds_sf1000/shape/query51.groovy | 147 - .../tpcds_sf1000/shape/query52.groovy | 81 - .../tpcds_sf1000/shape/query53.groovy | 93 - .../tpcds_sf1000/shape/query54.groovy | 149 - .../tpcds_sf1000/shape/query55.groovy | 65 - .../tpcds_sf1000/shape/query56.groovy | 175 
- .../tpcds_sf1000/shape/query57.groovy | 133 - .../tpcds_sf1000/shape/query58.groovy | 167 - .../tpcds_sf1000/shape/query59.groovy | 125 - .../tpcds_sf1000/shape/query6.groovy | 89 - .../tpcds_sf1000/shape/query60.groovy | 193 - .../tpcds_sf1000/shape/query61.groovy | 125 - .../tpcds_sf1000/shape/query62.groovy | 107 - .../tpcds_sf1000/shape/query63.groovy | 95 - .../tpcds_sf1000/shape/query64.groovy | 279 -- .../tpcds_sf1000/shape/query65.groovy | 95 - .../tpcds_sf1000/shape/query66.groovy | 477 -- .../tpcds_sf1000/shape/query67.groovy | 125 - .../tpcds_sf1000/shape/query68.groovy | 121 - .../tpcds_sf1000/shape/query69.groovy | 131 - .../tpcds_sf1000/shape/query7.groovy | 79 - .../tpcds_sf1000/shape/query70.groovy | 113 - .../tpcds_sf1000/shape/query71.groovy | 117 - .../tpcds_sf1000/shape/query72.groovy | 95 - .../tpcds_sf1000/shape/query73.groovy | 93 - .../tpcds_sf1000/shape/query74.groovy | 159 - .../tpcds_sf1000/shape/query75.groovy | 177 - .../tpcds_sf1000/shape/query76.groovy | 85 - .../tpcds_sf1000/shape/query77.groovy | 253 - .../tpcds_sf1000/shape/query78.groovy | 153 - .../tpcds_sf1000/shape/query79.groovy | 83 - .../tpcds_sf1000/shape/query8.groovy | 253 - .../tpcds_sf1000/shape/query80.groovy | 229 - .../tpcds_sf1000/shape/query81.groovy | 99 - .../tpcds_sf1000/shape/query82.groovy | 71 - .../tpcds_sf1000/shape/query83.groovy | 171 - .../tpcds_sf1000/shape/query84.groovy | 79 - .../tpcds_sf1000/shape/query85.groovy | 205 - .../tpcds_sf1000/shape/query86.groovy | 89 - .../tpcds_sf1000/shape/query87.groovy | 83 - .../tpcds_sf1000/shape/query88.groovy | 225 - .../tpcds_sf1000/shape/query89.groovy | 93 - .../tpcds_sf1000/shape/query9.groovy | 140 - .../tpcds_sf1000/shape/query90.groovy | 81 - .../tpcds_sf1000/shape/query91.groovy | 99 - .../tpcds_sf1000/shape/query92.groovy | 97 - .../tpcds_sf1000/shape/query93.groovy | 73 - .../tpcds_sf1000/shape/query94.groovy | 95 - .../tpcds_sf1000/shape/query95.groovy | 101 - .../tpcds_sf1000/shape/query96.groovy | 
69 - .../tpcds_sf1000/shape/query97.groovy | 92 - .../tpcds_sf1000/shape/query98.groovy | 103 - .../tpcds_sf1000/shape/query99.groovy | 107 - .../new_shapes_p0/tpch_sf1000/load.groovy | 484 -- .../tpch_sf1000/nostats_rf_prune/q1.groovy | 64 - .../tpch_sf1000/nostats_rf_prune/q10.groovy | 76 - .../tpch_sf1000/nostats_rf_prune/q11.groovy | 72 - .../tpch_sf1000/nostats_rf_prune/q12.groovy | 71 - .../tpch_sf1000/nostats_rf_prune/q13.groovy | 67 - .../tpch_sf1000/nostats_rf_prune/q14.groovy | 57 - .../tpch_sf1000/nostats_rf_prune/q15.groovy | 67 - .../tpch_sf1000/nostats_rf_prune/q16.groovy | 78 - .../tpch_sf1000/nostats_rf_prune/q17.groovy | 65 - .../tpch_sf1000/nostats_rf_prune/q18.groovy | 81 - .../tpch_sf1000/nostats_rf_prune/q19.groovy | 85 - .../tpch_sf1000/nostats_rf_prune/q2.groovy | 91 - .../nostats_rf_prune/q20-rewrite.groovy | 76 - .../tpch_sf1000/nostats_rf_prune/q20.groovy | 86 - .../tpch_sf1000/nostats_rf_prune/q21.groovy | 88 - .../tpch_sf1000/nostats_rf_prune/q22.groovy | 85 - .../tpch_sf1000/nostats_rf_prune/q3.groovy | 71 - .../tpch_sf1000/nostats_rf_prune/q4.groovy | 67 - .../tpch_sf1000/nostats_rf_prune/q5.groovy | 71 - .../tpch_sf1000/nostats_rf_prune/q6.groovy | 56 - .../tpch_sf1000/nostats_rf_prune/q7.groovy | 86 - .../tpch_sf1000/nostats_rf_prune/q8.groovy | 84 - .../tpch_sf1000/nostats_rf_prune/q9.groovy | 79 - .../tpch_sf1000/rf_prune/q1.groovy | 65 - .../tpch_sf1000/rf_prune/q10.groovy | 77 - .../tpch_sf1000/rf_prune/q11.groovy | 73 - .../tpch_sf1000/rf_prune/q12.groovy | 72 - .../tpch_sf1000/rf_prune/q13.groovy | 68 - .../tpch_sf1000/rf_prune/q14.groovy | 58 - .../tpch_sf1000/rf_prune/q15.groovy | 67 - .../tpch_sf1000/rf_prune/q16.groovy | 79 - .../tpch_sf1000/rf_prune/q17.groovy | 66 - .../tpch_sf1000/rf_prune/q18.groovy | 82 - .../tpch_sf1000/rf_prune/q19.groovy | 86 - .../tpch_sf1000/rf_prune/q2.groovy | 92 - .../tpch_sf1000/rf_prune/q20-rewrite.groovy | 77 - .../tpch_sf1000/rf_prune/q20.groovy | 87 - .../tpch_sf1000/rf_prune/q21.groovy | 
89 - .../tpch_sf1000/rf_prune/q22.groovy | 86 - .../tpch_sf1000/rf_prune/q3.groovy | 72 - .../tpch_sf1000/rf_prune/q4.groovy | 68 - .../tpch_sf1000/rf_prune/q5.groovy | 72 - .../tpch_sf1000/rf_prune/q6.groovy | 57 - .../tpch_sf1000/rf_prune/q7.groovy | 87 - .../tpch_sf1000/rf_prune/q8.groovy | 85 - .../tpch_sf1000/rf_prune/q9.groovy | 80 - .../runtime_filter/test_pushdown_setop.groovy | 48 - .../new_shapes_p0/tpch_sf1000/shape/q1.groovy | 62 - .../tpch_sf1000/shape/q10.groovy | 74 - .../tpch_sf1000/shape/q11.groovy | 71 - .../tpch_sf1000/shape/q12.groovy | 69 - .../tpch_sf1000/shape/q13.groovy | 65 - .../tpch_sf1000/shape/q14.groovy | 55 - .../tpch_sf1000/shape/q15.groovy | 65 - .../tpch_sf1000/shape/q16.groovy | 76 - .../tpch_sf1000/shape/q17.groovy | 63 - .../tpch_sf1000/shape/q18.groovy | 79 - .../tpch_sf1000/shape/q19.groovy | 83 - .../new_shapes_p0/tpch_sf1000/shape/q2.groovy | 90 - .../tpch_sf1000/shape/q20-rewrite.groovy | 74 - .../tpch_sf1000/shape/q20.groovy | 84 - .../tpch_sf1000/shape/q21.groovy | 86 - .../tpch_sf1000/shape/q22.groovy | 83 - .../new_shapes_p0/tpch_sf1000/shape/q3.groovy | 70 - .../new_shapes_p0/tpch_sf1000/shape/q4.groovy | 66 - .../new_shapes_p0/tpch_sf1000/shape/q5.groovy | 70 - .../new_shapes_p0/tpch_sf1000/shape/q6.groovy | 55 - .../new_shapes_p0/tpch_sf1000/shape/q7.groovy | 85 - .../new_shapes_p0/tpch_sf1000/shape/q8.groovy | 83 - .../new_shapes_p0/tpch_sf1000/shape/q9.groovy | 78 - .../tpch_sf1000/shape_no_stats/q1.groovy | 66 - .../tpch_sf1000/shape_no_stats/q10.groovy | 77 - .../tpch_sf1000/shape_no_stats/q11.groovy | 74 - .../tpch_sf1000/shape_no_stats/q12.groovy | 72 - .../tpch_sf1000/shape_no_stats/q13.groovy | 68 - .../tpch_sf1000/shape_no_stats/q14.groovy | 58 - .../tpch_sf1000/shape_no_stats/q15.groovy | 68 - .../tpch_sf1000/shape_no_stats/q16.groovy | 79 - .../tpch_sf1000/shape_no_stats/q17.groovy | 66 - .../tpch_sf1000/shape_no_stats/q18.groovy | 82 - .../tpch_sf1000/shape_no_stats/q19.groovy | 86 - 
.../tpch_sf1000/shape_no_stats/q2.groovy | 93 - .../shape_no_stats/q20-rewrite.groovy | 77 - .../tpch_sf1000/shape_no_stats/q20.groovy | 87 - .../tpch_sf1000/shape_no_stats/q21.groovy | 89 - .../tpch_sf1000/shape_no_stats/q22.groovy | 86 - .../tpch_sf1000/shape_no_stats/q3.groovy | 73 - .../tpch_sf1000/shape_no_stats/q4.groovy | 69 - .../tpch_sf1000/shape_no_stats/q5.groovy | 73 - .../tpch_sf1000/shape_no_stats/q6.groovy | 58 - .../tpch_sf1000/shape_no_stats/q7.groovy | 88 - .../tpch_sf1000/shape_no_stats/q8.groovy | 86 - .../tpch_sf1000/shape_no_stats/q9.groovy | 81 - .../clickbench}/load.groovy | 0 .../clickbench}/query1.groovy | 0 .../clickbench}/query10.groovy | 0 .../clickbench}/query11.groovy | 0 .../clickbench}/query12.groovy | 0 .../clickbench}/query13.groovy | 0 .../clickbench}/query14.groovy | 0 .../clickbench}/query15.groovy | 0 .../clickbench}/query16.groovy | 0 .../clickbench}/query17.groovy | 0 .../clickbench}/query18.groovy | 0 .../clickbench}/query19.groovy | 0 .../clickbench}/query2.groovy | 0 .../clickbench}/query20.groovy | 0 .../clickbench}/query21.groovy | 0 .../clickbench}/query22.groovy | 0 .../clickbench}/query23.groovy | 0 .../clickbench}/query24.groovy | 0 .../clickbench}/query25.groovy | 0 .../clickbench}/query26.groovy | 0 .../clickbench}/query27.groovy | 0 .../clickbench}/query28.groovy | 0 .../clickbench}/query29.groovy | 0 .../clickbench}/query3.groovy | 0 .../clickbench}/query30.groovy | 0 .../clickbench}/query31.groovy | 0 .../clickbench}/query32.groovy | 0 .../clickbench}/query33.groovy | 0 .../clickbench}/query34.groovy | 0 .../clickbench}/query35.groovy | 0 .../clickbench}/query36.groovy | 0 .../clickbench}/query37.groovy | 0 .../clickbench}/query38.groovy | 0 .../clickbench}/query39.groovy | 0 .../clickbench}/query4.groovy | 0 .../clickbench}/query40.groovy | 0 .../clickbench}/query41.groovy | 0 .../clickbench}/query42.groovy | 0 .../clickbench}/query43.groovy | 0 .../clickbench}/query5.groovy | 0 .../clickbench}/query6.groovy | 
0 .../clickbench}/query7.groovy | 0 .../clickbench}/query8.groovy | 0 .../clickbench}/query9.groovy | 0 .../ssb_sf100}/load.groovy | 0 .../ssb_sf100}/shape/flat.groovy | 0 .../ssb_sf100}/shape/q1.1.groovy | 0 .../ssb_sf100}/shape/q1.2.groovy | 0 .../ssb_sf100}/shape/q1.3.groovy | 0 .../ssb_sf100}/shape/q2.1.groovy | 0 .../ssb_sf100}/shape/q2.2.groovy | 0 .../ssb_sf100}/shape/q2.3.groovy | 0 .../ssb_sf100}/shape/q3.1.groovy | 0 .../ssb_sf100}/shape/q3.2.groovy | 0 .../ssb_sf100}/shape/q3.3.groovy | 0 .../ssb_sf100}/shape/q3.4.groovy | 0 .../ssb_sf100}/shape/q4.1.groovy | 0 .../ssb_sf100}/shape/q4.2.groovy | 0 .../ssb_sf100}/shape/q4.3.groovy | 0 .../tpcds_sf100}/constraints/load.groovy | 0 .../tpcds_sf100}/constraints/query23.groovy | 0 .../tpcds_sf100}/ddl/gen_rf_prune.py | 0 .../tpcds_sf100}/ddl/gen_shape.py | 0 .../tpcds_sf100}/ddl/rf_prune.tmpl | 0 .../tpcds_sf100}/ddl/shape.tmpl | 0 .../tpcds_sf100}/load.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query1.groovy | 0 .../noStatsRfPrune/query10.groovy | 0 .../noStatsRfPrune/query11.groovy | 0 .../noStatsRfPrune/query12.groovy | 0 .../noStatsRfPrune/query13.groovy | 0 .../noStatsRfPrune/query14.groovy | 0 .../noStatsRfPrune/query15.groovy | 0 .../noStatsRfPrune/query16.groovy | 0 .../noStatsRfPrune/query17.groovy | 0 .../noStatsRfPrune/query18.groovy | 0 .../noStatsRfPrune/query19.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query2.groovy | 0 .../noStatsRfPrune/query20.groovy | 0 .../noStatsRfPrune/query21.groovy | 0 .../noStatsRfPrune/query22.groovy | 0 .../noStatsRfPrune/query23.groovy | 0 .../noStatsRfPrune/query24.groovy | 0 .../noStatsRfPrune/query25.groovy | 0 .../noStatsRfPrune/query26.groovy | 0 .../noStatsRfPrune/query27.groovy | 0 .../noStatsRfPrune/query28.groovy | 0 .../noStatsRfPrune/query29.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query3.groovy | 0 .../noStatsRfPrune/query30.groovy | 0 .../noStatsRfPrune/query31.groovy | 0 .../noStatsRfPrune/query32.groovy | 0 .../noStatsRfPrune/query33.groovy | 0 
.../noStatsRfPrune/query34.groovy | 0 .../noStatsRfPrune/query35.groovy | 0 .../noStatsRfPrune/query36.groovy | 0 .../noStatsRfPrune/query37.groovy | 0 .../noStatsRfPrune/query38.groovy | 0 .../noStatsRfPrune/query39.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query4.groovy | 0 .../noStatsRfPrune/query40.groovy | 0 .../noStatsRfPrune/query41.groovy | 0 .../noStatsRfPrune/query42.groovy | 0 .../noStatsRfPrune/query43.groovy | 0 .../noStatsRfPrune/query44.groovy | 0 .../noStatsRfPrune/query45.groovy | 0 .../noStatsRfPrune/query46.groovy | 0 .../noStatsRfPrune/query47.groovy | 0 .../noStatsRfPrune/query48.groovy | 0 .../noStatsRfPrune/query49.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query5.groovy | 0 .../noStatsRfPrune/query50.groovy | 0 .../noStatsRfPrune/query51.groovy | 0 .../noStatsRfPrune/query52.groovy | 0 .../noStatsRfPrune/query53.groovy | 0 .../noStatsRfPrune/query54.groovy | 0 .../noStatsRfPrune/query55.groovy | 0 .../noStatsRfPrune/query56.groovy | 0 .../noStatsRfPrune/query57.groovy | 0 .../noStatsRfPrune/query58.groovy | 0 .../noStatsRfPrune/query59.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query6.groovy | 0 .../noStatsRfPrune/query60.groovy | 0 .../noStatsRfPrune/query61.groovy | 0 .../noStatsRfPrune/query62.groovy | 0 .../noStatsRfPrune/query63.groovy | 0 .../noStatsRfPrune/query64.groovy | 0 .../noStatsRfPrune/query65.groovy | 0 .../noStatsRfPrune/query66.groovy | 0 .../noStatsRfPrune/query67.groovy | 0 .../noStatsRfPrune/query68.groovy | 0 .../noStatsRfPrune/query69.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query7.groovy | 0 .../noStatsRfPrune/query70.groovy | 0 .../noStatsRfPrune/query71.groovy | 0 .../noStatsRfPrune/query72.groovy | 0 .../noStatsRfPrune/query73.groovy | 0 .../noStatsRfPrune/query74.groovy | 0 .../noStatsRfPrune/query75.groovy | 0 .../noStatsRfPrune/query76.groovy | 0 .../noStatsRfPrune/query77.groovy | 0 .../noStatsRfPrune/query78.groovy | 0 .../noStatsRfPrune/query79.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query8.groovy | 0 
.../noStatsRfPrune/query80.groovy | 0 .../noStatsRfPrune/query81.groovy | 0 .../noStatsRfPrune/query82.groovy | 0 .../noStatsRfPrune/query83.groovy | 0 .../noStatsRfPrune/query84.groovy | 0 .../noStatsRfPrune/query85.groovy | 0 .../noStatsRfPrune/query86.groovy | 0 .../noStatsRfPrune/query87.groovy | 0 .../noStatsRfPrune/query88.groovy | 0 .../noStatsRfPrune/query89.groovy | 0 .../tpcds_sf100}/noStatsRfPrune/query9.groovy | 0 .../noStatsRfPrune/query90.groovy | 0 .../noStatsRfPrune/query91.groovy | 0 .../noStatsRfPrune/query92.groovy | 0 .../noStatsRfPrune/query93.groovy | 0 .../noStatsRfPrune/query94.groovy | 0 .../noStatsRfPrune/query95.groovy | 0 .../noStatsRfPrune/query96.groovy | 0 .../noStatsRfPrune/query97.groovy | 0 .../noStatsRfPrune/query98.groovy | 0 .../noStatsRfPrune/query99.groovy | 0 .../tpcds_sf100}/no_stats_shape/query1.groovy | 0 .../no_stats_shape/query10.groovy | 0 .../no_stats_shape/query11.groovy | 0 .../no_stats_shape/query12.groovy | 0 .../no_stats_shape/query13.groovy | 0 .../no_stats_shape/query14.groovy | 0 .../no_stats_shape/query15.groovy | 0 .../no_stats_shape/query16.groovy | 0 .../no_stats_shape/query17.groovy | 0 .../no_stats_shape/query18.groovy | 0 .../no_stats_shape/query19.groovy | 0 .../tpcds_sf100}/no_stats_shape/query2.groovy | 0 .../no_stats_shape/query20.groovy | 0 .../no_stats_shape/query21.groovy | 0 .../no_stats_shape/query22.groovy | 0 .../no_stats_shape/query23.groovy | 0 .../no_stats_shape/query24.groovy | 0 .../no_stats_shape/query25.groovy | 0 .../no_stats_shape/query26.groovy | 0 .../no_stats_shape/query27.groovy | 0 .../no_stats_shape/query28.groovy | 0 .../no_stats_shape/query29.groovy | 0 .../tpcds_sf100}/no_stats_shape/query3.groovy | 0 .../no_stats_shape/query30.groovy | 0 .../no_stats_shape/query31.groovy | 0 .../no_stats_shape/query32.groovy | 0 .../no_stats_shape/query33.groovy | 0 .../no_stats_shape/query34.groovy | 0 .../no_stats_shape/query35.groovy | 0 .../no_stats_shape/query36.groovy | 0 
.../no_stats_shape/query37.groovy | 0 .../no_stats_shape/query38.groovy | 0 .../no_stats_shape/query39.groovy | 0 .../tpcds_sf100}/no_stats_shape/query4.groovy | 0 .../no_stats_shape/query40.groovy | 0 .../no_stats_shape/query41.groovy | 0 .../no_stats_shape/query42.groovy | 0 .../no_stats_shape/query43.groovy | 0 .../no_stats_shape/query44.groovy | 0 .../no_stats_shape/query45.groovy | 0 .../no_stats_shape/query46.groovy | 0 .../no_stats_shape/query47.groovy | 0 .../no_stats_shape/query48.groovy | 0 .../no_stats_shape/query49.groovy | 0 .../tpcds_sf100}/no_stats_shape/query5.groovy | 0 .../no_stats_shape/query50.groovy | 0 .../no_stats_shape/query51.groovy | 0 .../no_stats_shape/query52.groovy | 0 .../no_stats_shape/query53.groovy | 0 .../no_stats_shape/query54.groovy | 0 .../no_stats_shape/query55.groovy | 0 .../no_stats_shape/query56.groovy | 0 .../no_stats_shape/query57.groovy | 0 .../no_stats_shape/query58.groovy | 0 .../no_stats_shape/query59.groovy | 0 .../tpcds_sf100}/no_stats_shape/query6.groovy | 0 .../no_stats_shape/query60.groovy | 0 .../no_stats_shape/query61.groovy | 0 .../no_stats_shape/query62.groovy | 0 .../no_stats_shape/query63.groovy | 0 .../no_stats_shape/query64.groovy | 0 .../no_stats_shape/query65.groovy | 0 .../no_stats_shape/query66.groovy | 0 .../no_stats_shape/query67.groovy | 0 .../no_stats_shape/query68.groovy | 0 .../no_stats_shape/query69.groovy | 0 .../tpcds_sf100}/no_stats_shape/query7.groovy | 0 .../no_stats_shape/query70.groovy | 0 .../no_stats_shape/query71.groovy | 0 .../no_stats_shape/query72.groovy | 0 .../no_stats_shape/query73.groovy | 0 .../no_stats_shape/query74.groovy | 0 .../no_stats_shape/query75.groovy | 0 .../no_stats_shape/query76.groovy | 0 .../no_stats_shape/query77.groovy | 0 .../no_stats_shape/query78.groovy | 0 .../no_stats_shape/query79.groovy | 0 .../tpcds_sf100}/no_stats_shape/query8.groovy | 0 .../no_stats_shape/query80.groovy | 0 .../no_stats_shape/query81.groovy | 0 .../no_stats_shape/query82.groovy | 0 
.../no_stats_shape/query83.groovy | 0 .../no_stats_shape/query84.groovy | 0 .../no_stats_shape/query85.groovy | 0 .../no_stats_shape/query86.groovy | 0 .../no_stats_shape/query87.groovy | 0 .../no_stats_shape/query88.groovy | 0 .../no_stats_shape/query89.groovy | 0 .../tpcds_sf100}/no_stats_shape/query9.groovy | 0 .../no_stats_shape/query90.groovy | 0 .../no_stats_shape/query91.groovy | 0 .../no_stats_shape/query92.groovy | 0 .../no_stats_shape/query93.groovy | 0 .../no_stats_shape/query94.groovy | 0 .../no_stats_shape/query95.groovy | 0 .../no_stats_shape/query96.groovy | 0 .../no_stats_shape/query97.groovy | 0 .../no_stats_shape/query98.groovy | 0 .../no_stats_shape/query99.groovy | 0 .../tpcds_sf100}/rf_prune/query1.groovy | 0 .../tpcds_sf100}/rf_prune/query10.groovy | 0 .../tpcds_sf100}/rf_prune/query11.groovy | 0 .../tpcds_sf100}/rf_prune/query12.groovy | 0 .../tpcds_sf100}/rf_prune/query13.groovy | 0 .../tpcds_sf100}/rf_prune/query14.groovy | 0 .../tpcds_sf100}/rf_prune/query15.groovy | 0 .../tpcds_sf100}/rf_prune/query16.groovy | 0 .../tpcds_sf100}/rf_prune/query17.groovy | 0 .../tpcds_sf100}/rf_prune/query18.groovy | 0 .../tpcds_sf100}/rf_prune/query19.groovy | 0 .../tpcds_sf100}/rf_prune/query2.groovy | 0 .../tpcds_sf100}/rf_prune/query20.groovy | 0 .../tpcds_sf100}/rf_prune/query21.groovy | 0 .../tpcds_sf100}/rf_prune/query22.groovy | 0 .../tpcds_sf100}/rf_prune/query23.groovy | 0 .../tpcds_sf100}/rf_prune/query24.groovy | 0 .../tpcds_sf100}/rf_prune/query25.groovy | 0 .../tpcds_sf100}/rf_prune/query26.groovy | 0 .../tpcds_sf100}/rf_prune/query27.groovy | 0 .../tpcds_sf100}/rf_prune/query28.groovy | 0 .../tpcds_sf100}/rf_prune/query29.groovy | 0 .../tpcds_sf100}/rf_prune/query3.groovy | 0 .../tpcds_sf100}/rf_prune/query30.groovy | 0 .../tpcds_sf100}/rf_prune/query31.groovy | 0 .../tpcds_sf100}/rf_prune/query32.groovy | 0 .../tpcds_sf100}/rf_prune/query33.groovy | 0 .../tpcds_sf100}/rf_prune/query34.groovy | 0 .../tpcds_sf100}/rf_prune/query35.groovy | 0 
.../tpcds_sf100}/rf_prune/query36.groovy | 0 .../tpcds_sf100}/rf_prune/query37.groovy | 0 .../tpcds_sf100}/rf_prune/query38.groovy | 0 .../tpcds_sf100}/rf_prune/query39.groovy | 0 .../tpcds_sf100}/rf_prune/query4.groovy | 0 .../tpcds_sf100}/rf_prune/query40.groovy | 0 .../tpcds_sf100}/rf_prune/query41.groovy | 0 .../tpcds_sf100}/rf_prune/query42.groovy | 0 .../tpcds_sf100}/rf_prune/query43.groovy | 0 .../tpcds_sf100}/rf_prune/query44.groovy | 0 .../tpcds_sf100}/rf_prune/query45.groovy | 0 .../tpcds_sf100}/rf_prune/query46.groovy | 0 .../tpcds_sf100}/rf_prune/query47.groovy | 0 .../tpcds_sf100}/rf_prune/query48.groovy | 0 .../tpcds_sf100}/rf_prune/query49.groovy | 0 .../tpcds_sf100}/rf_prune/query5.groovy | 0 .../tpcds_sf100}/rf_prune/query50.groovy | 0 .../tpcds_sf100}/rf_prune/query51.groovy | 0 .../tpcds_sf100}/rf_prune/query52.groovy | 0 .../tpcds_sf100}/rf_prune/query53.groovy | 0 .../tpcds_sf100}/rf_prune/query54.groovy | 0 .../tpcds_sf100}/rf_prune/query55.groovy | 0 .../tpcds_sf100}/rf_prune/query56.groovy | 0 .../tpcds_sf100}/rf_prune/query57.groovy | 0 .../tpcds_sf100}/rf_prune/query58.groovy | 0 .../tpcds_sf100}/rf_prune/query59.groovy | 0 .../tpcds_sf100}/rf_prune/query6.groovy | 0 .../tpcds_sf100}/rf_prune/query60.groovy | 0 .../tpcds_sf100}/rf_prune/query61.groovy | 0 .../tpcds_sf100}/rf_prune/query62.groovy | 0 .../tpcds_sf100}/rf_prune/query63.groovy | 0 .../tpcds_sf100}/rf_prune/query64.groovy | 0 .../tpcds_sf100}/rf_prune/query65.groovy | 0 .../tpcds_sf100}/rf_prune/query66.groovy | 0 .../tpcds_sf100}/rf_prune/query67.groovy | 0 .../tpcds_sf100}/rf_prune/query68.groovy | 0 .../tpcds_sf100}/rf_prune/query69.groovy | 0 .../tpcds_sf100}/rf_prune/query7.groovy | 0 .../tpcds_sf100}/rf_prune/query70.groovy | 0 .../tpcds_sf100}/rf_prune/query71.groovy | 0 .../tpcds_sf100}/rf_prune/query72.groovy | 0 .../tpcds_sf100}/rf_prune/query73.groovy | 0 .../tpcds_sf100}/rf_prune/query74.groovy | 0 .../tpcds_sf100}/rf_prune/query75.groovy | 0 
.../tpcds_sf100}/rf_prune/query76.groovy | 0 .../tpcds_sf100}/rf_prune/query77.groovy | 0 .../tpcds_sf100}/rf_prune/query78.groovy | 0 .../tpcds_sf100}/rf_prune/query79.groovy | 0 .../tpcds_sf100}/rf_prune/query8.groovy | 0 .../tpcds_sf100}/rf_prune/query80.groovy | 0 .../tpcds_sf100}/rf_prune/query81.groovy | 0 .../tpcds_sf100}/rf_prune/query82.groovy | 0 .../tpcds_sf100}/rf_prune/query83.groovy | 0 .../tpcds_sf100}/rf_prune/query84.groovy | 0 .../tpcds_sf100}/rf_prune/query85.groovy | 0 .../tpcds_sf100}/rf_prune/query86.groovy | 0 .../tpcds_sf100}/rf_prune/query87.groovy | 0 .../tpcds_sf100}/rf_prune/query88.groovy | 0 .../tpcds_sf100}/rf_prune/query89.groovy | 0 .../tpcds_sf100}/rf_prune/query9.groovy | 0 .../tpcds_sf100}/rf_prune/query90.groovy | 0 .../tpcds_sf100}/rf_prune/query91.groovy | 0 .../tpcds_sf100}/rf_prune/query92.groovy | 0 .../tpcds_sf100}/rf_prune/query93.groovy | 0 .../tpcds_sf100}/rf_prune/query94.groovy | 0 .../tpcds_sf100}/rf_prune/query95.groovy | 0 .../tpcds_sf100}/rf_prune/query96.groovy | 0 .../tpcds_sf100}/rf_prune/query97.groovy | 0 .../tpcds_sf100}/rf_prune/query98.groovy | 0 .../tpcds_sf100}/rf_prune/query99.groovy | 0 .../tpcds_sf100}/shape/query1.groovy | 0 .../tpcds_sf100}/shape/query10.groovy | 0 .../tpcds_sf100}/shape/query11.groovy | 0 .../tpcds_sf100}/shape/query12.groovy | 0 .../tpcds_sf100}/shape/query13.groovy | 0 .../tpcds_sf100}/shape/query14.groovy | 0 .../tpcds_sf100}/shape/query15.groovy | 0 .../tpcds_sf100}/shape/query16.groovy | 0 .../tpcds_sf100}/shape/query17.groovy | 0 .../tpcds_sf100}/shape/query18.groovy | 0 .../tpcds_sf100}/shape/query19.groovy | 0 .../tpcds_sf100}/shape/query2.groovy | 0 .../tpcds_sf100}/shape/query20.groovy | 0 .../tpcds_sf100}/shape/query21.groovy | 0 .../tpcds_sf100}/shape/query22.groovy | 0 .../tpcds_sf100}/shape/query23.groovy | 0 .../tpcds_sf100}/shape/query24.groovy | 0 .../tpcds_sf100}/shape/query25.groovy | 0 .../tpcds_sf100}/shape/query26.groovy | 0 
.../tpcds_sf100}/shape/query27.groovy | 0 .../tpcds_sf100}/shape/query28.groovy | 0 .../tpcds_sf100}/shape/query29.groovy | 0 .../tpcds_sf100}/shape/query3.groovy | 0 .../tpcds_sf100}/shape/query30.groovy | 0 .../tpcds_sf100}/shape/query31.groovy | 0 .../tpcds_sf100}/shape/query32.groovy | 0 .../tpcds_sf100}/shape/query33.groovy | 0 .../tpcds_sf100}/shape/query34.groovy | 0 .../tpcds_sf100}/shape/query35.groovy | 0 .../tpcds_sf100}/shape/query36.groovy | 0 .../tpcds_sf100}/shape/query37.groovy | 0 .../tpcds_sf100}/shape/query38.groovy | 0 .../tpcds_sf100}/shape/query39.groovy | 0 .../tpcds_sf100}/shape/query4.groovy | 0 .../tpcds_sf100}/shape/query40.groovy | 0 .../tpcds_sf100}/shape/query41.groovy | 0 .../tpcds_sf100}/shape/query42.groovy | 0 .../tpcds_sf100}/shape/query43.groovy | 0 .../tpcds_sf100}/shape/query44.groovy | 0 .../tpcds_sf100}/shape/query45.groovy | 0 .../tpcds_sf100}/shape/query46.groovy | 0 .../tpcds_sf100}/shape/query47.groovy | 0 .../tpcds_sf100}/shape/query48.groovy | 0 .../tpcds_sf100}/shape/query49.groovy | 0 .../tpcds_sf100}/shape/query5.groovy | 0 .../tpcds_sf100}/shape/query50.groovy | 0 .../tpcds_sf100}/shape/query51.groovy | 0 .../tpcds_sf100}/shape/query52.groovy | 0 .../tpcds_sf100}/shape/query53.groovy | 0 .../tpcds_sf100}/shape/query54.groovy | 0 .../tpcds_sf100}/shape/query55.groovy | 0 .../tpcds_sf100}/shape/query56.groovy | 0 .../tpcds_sf100}/shape/query57.groovy | 0 .../tpcds_sf100}/shape/query58.groovy | 0 .../tpcds_sf100}/shape/query59.groovy | 0 .../tpcds_sf100}/shape/query6.groovy | 0 .../tpcds_sf100}/shape/query60.groovy | 0 .../tpcds_sf100}/shape/query61.groovy | 0 .../tpcds_sf100}/shape/query62.groovy | 0 .../tpcds_sf100}/shape/query63.groovy | 0 .../tpcds_sf100}/shape/query64.groovy | 0 .../tpcds_sf100}/shape/query65.groovy | 0 .../tpcds_sf100}/shape/query66.groovy | 0 .../tpcds_sf100}/shape/query67.groovy | 0 .../tpcds_sf100}/shape/query68.groovy | 0 .../tpcds_sf100}/shape/query69.groovy | 0 
.../tpcds_sf100}/shape/query7.groovy | 0 .../tpcds_sf100}/shape/query70.groovy | 0 .../tpcds_sf100}/shape/query71.groovy | 0 .../tpcds_sf100}/shape/query72.groovy | 0 .../tpcds_sf100}/shape/query73.groovy | 0 .../tpcds_sf100}/shape/query74.groovy | 0 .../tpcds_sf100}/shape/query75.groovy | 0 .../tpcds_sf100}/shape/query76.groovy | 0 .../tpcds_sf100}/shape/query77.groovy | 0 .../tpcds_sf100}/shape/query78.groovy | 0 .../tpcds_sf100}/shape/query79.groovy | 0 .../tpcds_sf100}/shape/query8.groovy | 0 .../tpcds_sf100}/shape/query80.groovy | 0 .../tpcds_sf100}/shape/query81.groovy | 0 .../tpcds_sf100}/shape/query82.groovy | 0 .../tpcds_sf100}/shape/query83.groovy | 0 .../tpcds_sf100}/shape/query84.groovy | 0 .../tpcds_sf100}/shape/query85.groovy | 0 .../tpcds_sf100}/shape/query86.groovy | 0 .../tpcds_sf100}/shape/query87.groovy | 0 .../tpcds_sf100}/shape/query88.groovy | 0 .../tpcds_sf100}/shape/query89.groovy | 0 .../tpcds_sf100}/shape/query9.groovy | 0 .../tpcds_sf100}/shape/query90.groovy | 0 .../tpcds_sf100}/shape/query91.groovy | 0 .../tpcds_sf100}/shape/query92.groovy | 0 .../tpcds_sf100}/shape/query93.groovy | 0 .../tpcds_sf100}/shape/query94.groovy | 0 .../tpcds_sf100}/shape/query95.groovy | 0 .../tpcds_sf100}/shape/query96.groovy | 0 .../tpcds_sf100}/shape/query97.groovy | 0 .../tpcds_sf100}/shape/query98.groovy | 0 .../tpcds_sf100}/shape/query99.groovy | 0 .../shape/tpcds_sf100_stats.groovy | 0 .../bs_downgrade_shape/query13.groovy | 0 .../bs_downgrade_shape/query19.groovy | 0 .../bs_downgrade_shape/query44.groovy | 0 .../bs_downgrade_shape/query45.groovy | 0 .../bs_downgrade_shape/query54.groovy | 0 .../bs_downgrade_shape/query56.groovy | 0 .../bs_downgrade_shape/query6.groovy | 0 .../bs_downgrade_shape/query61.groovy | 0 .../bs_downgrade_shape/query68.groovy | 0 .../bs_downgrade_shape/query8.groovy | 0 .../bs_downgrade_shape/query91.groovy | 0 .../bs_downgrade_shape/query95.groovy | 0 .../tpcds_sf1000}/ddl/gen_shape.py | 0 .../tpcds_sf1000}/ddl/shape.tmpl | 0 
.../eliminate_empty/query10_empty.groovy | 0 .../tpcds_sf1000/hint}/query1.groovy | 0 .../tpcds_sf1000/hint}/query10.groovy | 0 .../tpcds_sf1000/hint}/query11.groovy | 0 .../tpcds_sf1000/hint}/query12.groovy | 0 .../tpcds_sf1000/hint}/query13.groovy | 0 .../tpcds_sf1000/hint}/query14.groovy | 0 .../tpcds_sf1000/hint}/query15.groovy | 0 .../tpcds_sf1000/hint}/query16.groovy | 0 .../tpcds_sf1000/hint}/query17.groovy | 0 .../tpcds_sf1000/hint}/query18.groovy | 0 .../tpcds_sf1000/hint}/query19.groovy | 0 .../tpcds_sf1000/hint}/query2.groovy | 0 .../tpcds_sf1000/hint}/query20.groovy | 0 .../tpcds_sf1000/hint}/query21.groovy | 0 .../tpcds_sf1000/hint}/query22.groovy | 0 .../tpcds_sf1000/hint}/query23.groovy | 0 .../tpcds_sf1000/hint}/query24.groovy | 0 .../tpcds_sf1000/hint}/query25.groovy | 0 .../tpcds_sf1000/hint}/query26.groovy | 0 .../tpcds_sf1000/hint}/query27.groovy | 0 .../tpcds_sf1000/hint}/query28.groovy | 0 .../tpcds_sf1000/hint}/query29.groovy | 0 .../tpcds_sf1000/hint}/query3.groovy | 0 .../tpcds_sf1000/hint}/query30.groovy | 0 .../tpcds_sf1000/hint}/query31.groovy | 0 .../tpcds_sf1000/hint}/query32.groovy | 0 .../tpcds_sf1000/hint}/query34.groovy | 0 .../tpcds_sf1000/hint}/query36.groovy | 0 .../tpcds_sf1000/hint}/query37.groovy | 0 .../tpcds_sf1000/hint}/query38.groovy | 0 .../tpcds_sf1000/hint}/query39.groovy | 0 .../tpcds_sf1000/hint}/query4.groovy | 0 .../tpcds_sf1000/hint}/query40.groovy | 0 .../tpcds_sf1000/hint}/query41.groovy | 0 .../tpcds_sf1000/hint}/query42.groovy | 0 .../tpcds_sf1000/hint}/query43.groovy | 0 .../tpcds_sf1000/hint}/query44.groovy | 0 .../tpcds_sf1000/hint}/query45.groovy | 0 .../tpcds_sf1000/hint}/query46.groovy | 0 .../tpcds_sf1000/hint}/query47.groovy | 0 .../tpcds_sf1000/hint}/query48.groovy | 0 .../tpcds_sf1000/hint}/query49.groovy | 0 .../tpcds_sf1000/hint}/query5.groovy | 0 .../tpcds_sf1000/hint}/query50.groovy | 0 .../tpcds_sf1000/hint}/query51.groovy | 0 .../tpcds_sf1000/hint}/query52.groovy | 0 
.../tpcds_sf1000/hint}/query53.groovy | 0 .../tpcds_sf1000/hint}/query54.groovy | 0 .../tpcds_sf1000/hint}/query55.groovy | 0 .../tpcds_sf1000/hint}/query56.groovy | 0 .../tpcds_sf1000/hint}/query57.groovy | 0 .../tpcds_sf1000/hint}/query58.groovy | 0 .../tpcds_sf1000/hint}/query59.groovy | 0 .../tpcds_sf1000/hint}/query6.groovy | 0 .../tpcds_sf1000/hint}/query60.groovy | 0 .../tpcds_sf1000/hint}/query61.groovy | 0 .../tpcds_sf1000/hint}/query62.groovy | 0 .../tpcds_sf1000/hint}/query63.groovy | 0 .../tpcds_sf1000/hint}/query64.groovy | 0 .../tpcds_sf1000/hint}/query65.groovy | 0 .../tpcds_sf1000/hint}/query66.groovy | 0 .../tpcds_sf1000/hint}/query67.groovy | 0 .../tpcds_sf1000/hint}/query68.groovy | 0 .../tpcds_sf1000/hint}/query69.groovy | 0 .../tpcds_sf1000/hint}/query7.groovy | 0 .../tpcds_sf1000/hint}/query70.groovy | 0 .../tpcds_sf1000/hint}/query71.groovy | 0 .../tpcds_sf1000/hint}/query72.groovy | 0 .../tpcds_sf1000/hint}/query73.groovy | 0 .../tpcds_sf1000/hint}/query74.groovy | 0 .../tpcds_sf1000/hint}/query75.groovy | 0 .../tpcds_sf1000/hint}/query76.groovy | 0 .../tpcds_sf1000/hint}/query77.groovy | 0 .../tpcds_sf1000/hint}/query78.groovy | 0 .../tpcds_sf1000/hint}/query79.groovy | 0 .../tpcds_sf1000/hint}/query8.groovy | 0 .../tpcds_sf1000/hint}/query80.groovy | 0 .../tpcds_sf1000/hint}/query81.groovy | 0 .../tpcds_sf1000/hint}/query82.groovy | 0 .../tpcds_sf1000/hint}/query84.groovy | 0 .../tpcds_sf1000/hint}/query85.groovy | 0 .../tpcds_sf1000/hint}/query86.groovy | 0 .../tpcds_sf1000/hint}/query87.groovy | 0 .../tpcds_sf1000/hint}/query88.groovy | 0 .../tpcds_sf1000/hint}/query89.groovy | 0 .../tpcds_sf1000/hint}/query9.groovy | 0 .../tpcds_sf1000/hint}/query90.groovy | 0 .../tpcds_sf1000/hint}/query91.groovy | 0 .../tpcds_sf1000/hint}/query92.groovy | 0 .../tpcds_sf1000/hint}/query93.groovy | 0 .../tpcds_sf1000/hint}/query94.groovy | 0 .../tpcds_sf1000/hint}/query95.groovy | 0 .../tpcds_sf1000/hint}/query96.groovy | 0 
.../tpcds_sf1000/hint}/query97.groovy | 0 .../tpcds_sf1000/hint}/query98.groovy | 0 .../tpcds_sf1000/hint}/query99.groovy | 0 .../tpcds_sf1000}/load.groovy | 0 .../tpcds_sf1000}/shape/query1.groovy | 0 .../tpcds_sf1000}/shape/query10.groovy | 0 .../tpcds_sf1000}/shape/query11.groovy | 0 .../tpcds_sf1000}/shape/query12.groovy | 0 .../tpcds_sf1000}/shape/query13.groovy | 0 .../tpcds_sf1000}/shape/query14.groovy | 0 .../tpcds_sf1000}/shape/query15.groovy | 0 .../tpcds_sf1000}/shape/query16.groovy | 0 .../tpcds_sf1000}/shape/query17.groovy | 0 .../tpcds_sf1000}/shape/query18.groovy | 0 .../tpcds_sf1000}/shape/query19.groovy | 0 .../tpcds_sf1000}/shape/query2.groovy | 0 .../tpcds_sf1000}/shape/query20.groovy | 0 .../tpcds_sf1000}/shape/query21.groovy | 0 .../tpcds_sf1000}/shape/query22.groovy | 0 .../tpcds_sf1000}/shape/query23.groovy | 0 .../tpcds_sf1000}/shape/query24.groovy | 0 .../tpcds_sf1000}/shape/query25.groovy | 0 .../tpcds_sf1000}/shape/query26.groovy | 0 .../tpcds_sf1000}/shape/query27.groovy | 0 .../tpcds_sf1000}/shape/query28.groovy | 0 .../tpcds_sf1000}/shape/query29.groovy | 0 .../tpcds_sf1000}/shape/query3.groovy | 0 .../tpcds_sf1000}/shape/query30.groovy | 0 .../tpcds_sf1000}/shape/query31.groovy | 0 .../tpcds_sf1000}/shape/query32.groovy | 0 .../tpcds_sf1000}/shape/query33.groovy | 0 .../tpcds_sf1000}/shape/query34.groovy | 0 .../tpcds_sf1000}/shape/query35.groovy | 0 .../tpcds_sf1000}/shape/query36.groovy | 0 .../tpcds_sf1000}/shape/query37.groovy | 0 .../tpcds_sf1000}/shape/query38.groovy | 0 .../tpcds_sf1000}/shape/query39.groovy | 0 .../tpcds_sf1000}/shape/query4.groovy | 0 .../tpcds_sf1000}/shape/query40.groovy | 0 .../tpcds_sf1000}/shape/query41.groovy | 0 .../tpcds_sf1000}/shape/query42.groovy | 0 .../tpcds_sf1000}/shape/query43.groovy | 0 .../tpcds_sf1000}/shape/query44.groovy | 0 .../tpcds_sf1000}/shape/query45.groovy | 0 .../tpcds_sf1000}/shape/query46.groovy | 0 .../tpcds_sf1000}/shape/query47.groovy | 0 
.../tpcds_sf1000}/shape/query48.groovy | 0 .../tpcds_sf1000}/shape/query49.groovy | 0 .../tpcds_sf1000}/shape/query5.groovy | 0 .../tpcds_sf1000}/shape/query50.groovy | 0 .../tpcds_sf1000}/shape/query51.groovy | 0 .../tpcds_sf1000}/shape/query52.groovy | 0 .../tpcds_sf1000}/shape/query53.groovy | 0 .../tpcds_sf1000}/shape/query54.groovy | 0 .../tpcds_sf1000}/shape/query55.groovy | 0 .../tpcds_sf1000}/shape/query56.groovy | 0 .../tpcds_sf1000}/shape/query57.groovy | 0 .../tpcds_sf1000}/shape/query58.groovy | 0 .../tpcds_sf1000}/shape/query59.groovy | 0 .../tpcds_sf1000}/shape/query6.groovy | 0 .../tpcds_sf1000}/shape/query60.groovy | 0 .../tpcds_sf1000}/shape/query61.groovy | 0 .../tpcds_sf1000}/shape/query62.groovy | 0 .../tpcds_sf1000}/shape/query63.groovy | 0 .../tpcds_sf1000}/shape/query64.groovy | 0 .../tpcds_sf1000}/shape/query65.groovy | 0 .../tpcds_sf1000}/shape/query66.groovy | 0 .../tpcds_sf1000}/shape/query67.groovy | 0 .../tpcds_sf1000}/shape/query68.groovy | 0 .../tpcds_sf1000}/shape/query69.groovy | 0 .../tpcds_sf1000}/shape/query7.groovy | 0 .../tpcds_sf1000}/shape/query70.groovy | 0 .../tpcds_sf1000}/shape/query71.groovy | 0 .../tpcds_sf1000}/shape/query72.groovy | 0 .../tpcds_sf1000}/shape/query73.groovy | 0 .../tpcds_sf1000}/shape/query74.groovy | 0 .../tpcds_sf1000}/shape/query75.groovy | 0 .../tpcds_sf1000}/shape/query76.groovy | 0 .../tpcds_sf1000}/shape/query77.groovy | 0 .../tpcds_sf1000}/shape/query78.groovy | 0 .../tpcds_sf1000}/shape/query79.groovy | 0 .../tpcds_sf1000}/shape/query8.groovy | 0 .../tpcds_sf1000}/shape/query80.groovy | 0 .../tpcds_sf1000}/shape/query81.groovy | 0 .../tpcds_sf1000}/shape/query82.groovy | 0 .../tpcds_sf1000}/shape/query83.groovy | 0 .../tpcds_sf1000}/shape/query84.groovy | 0 .../tpcds_sf1000}/shape/query85.groovy | 0 .../tpcds_sf1000}/shape/query86.groovy | 0 .../tpcds_sf1000}/shape/query87.groovy | 0 .../tpcds_sf1000}/shape/query88.groovy | 0 .../tpcds_sf1000}/shape/query89.groovy | 0 
.../tpcds_sf1000}/shape/query9.groovy | 0 .../tpcds_sf1000}/shape/query90.groovy | 0 .../tpcds_sf1000}/shape/query91.groovy | 0 .../tpcds_sf1000}/shape/query92.groovy | 0 .../tpcds_sf1000}/shape/query93.groovy | 0 .../tpcds_sf1000}/shape/query94.groovy | 0 .../tpcds_sf1000}/shape/query95.groovy | 0 .../tpcds_sf1000}/shape/query96.groovy | 0 .../tpcds_sf1000}/shape/query97.groovy | 0 .../tpcds_sf1000}/shape/query98.groovy | 0 .../tpcds_sf1000}/shape/query99.groovy | 0 .../tpcds_sf10t_orc}/ddl/gen_shape.py | 0 .../tpcds_sf10t_orc}/ddl/shape.tmpl | 0 .../tpcds_sf10t_orc}/load.groovy | 0 .../tpcds_sf10t_orc}/shape/query1.groovy | 0 .../tpcds_sf10t_orc}/shape/query10.groovy | 0 .../tpcds_sf10t_orc}/shape/query11.groovy | 0 .../tpcds_sf10t_orc}/shape/query12.groovy | 0 .../tpcds_sf10t_orc}/shape/query13.groovy | 0 .../tpcds_sf10t_orc}/shape/query14.groovy | 0 .../tpcds_sf10t_orc}/shape/query15.groovy | 0 .../tpcds_sf10t_orc}/shape/query16.groovy | 0 .../tpcds_sf10t_orc}/shape/query17.groovy | 0 .../tpcds_sf10t_orc}/shape/query18.groovy | 0 .../tpcds_sf10t_orc}/shape/query19.groovy | 0 .../tpcds_sf10t_orc}/shape/query2.groovy | 0 .../tpcds_sf10t_orc}/shape/query20.groovy | 0 .../tpcds_sf10t_orc}/shape/query21.groovy | 0 .../tpcds_sf10t_orc}/shape/query22.groovy | 0 .../tpcds_sf10t_orc}/shape/query23.groovy | 0 .../tpcds_sf10t_orc}/shape/query24.groovy | 0 .../tpcds_sf10t_orc}/shape/query25.groovy | 0 .../tpcds_sf10t_orc}/shape/query26.groovy | 0 .../tpcds_sf10t_orc}/shape/query27.groovy | 0 .../tpcds_sf10t_orc}/shape/query28.groovy | 0 .../tpcds_sf10t_orc}/shape/query29.groovy | 0 .../tpcds_sf10t_orc}/shape/query3.groovy | 0 .../tpcds_sf10t_orc}/shape/query30.groovy | 0 .../tpcds_sf10t_orc}/shape/query31.groovy | 0 .../tpcds_sf10t_orc}/shape/query32.groovy | 0 .../tpcds_sf10t_orc}/shape/query33.groovy | 0 .../tpcds_sf10t_orc}/shape/query34.groovy | 0 .../tpcds_sf10t_orc}/shape/query35.groovy | 0 .../tpcds_sf10t_orc}/shape/query36.groovy | 0 
.../tpcds_sf10t_orc}/shape/query37.groovy | 0 .../tpcds_sf10t_orc}/shape/query38.groovy | 0 .../tpcds_sf10t_orc}/shape/query39.groovy | 0 .../tpcds_sf10t_orc}/shape/query4.groovy | 0 .../tpcds_sf10t_orc}/shape/query40.groovy | 0 .../tpcds_sf10t_orc}/shape/query41.groovy | 0 .../tpcds_sf10t_orc}/shape/query42.groovy | 0 .../tpcds_sf10t_orc}/shape/query43.groovy | 0 .../tpcds_sf10t_orc}/shape/query44.groovy | 0 .../tpcds_sf10t_orc}/shape/query45.groovy | 0 .../tpcds_sf10t_orc}/shape/query46.groovy | 0 .../tpcds_sf10t_orc}/shape/query47.groovy | 0 .../tpcds_sf10t_orc}/shape/query48.groovy | 0 .../tpcds_sf10t_orc}/shape/query49.groovy | 0 .../tpcds_sf10t_orc}/shape/query5.groovy | 0 .../tpcds_sf10t_orc}/shape/query50.groovy | 0 .../tpcds_sf10t_orc}/shape/query51.groovy | 0 .../tpcds_sf10t_orc}/shape/query52.groovy | 0 .../tpcds_sf10t_orc}/shape/query53.groovy | 0 .../tpcds_sf10t_orc}/shape/query54.groovy | 0 .../tpcds_sf10t_orc}/shape/query55.groovy | 0 .../tpcds_sf10t_orc}/shape/query56.groovy | 0 .../tpcds_sf10t_orc}/shape/query57.groovy | 0 .../tpcds_sf10t_orc}/shape/query58.groovy | 0 .../tpcds_sf10t_orc}/shape/query59.groovy | 0 .../tpcds_sf10t_orc}/shape/query6.groovy | 0 .../tpcds_sf10t_orc}/shape/query60.groovy | 0 .../tpcds_sf10t_orc}/shape/query61.groovy | 0 .../tpcds_sf10t_orc}/shape/query62.groovy | 0 .../tpcds_sf10t_orc}/shape/query63.groovy | 0 .../tpcds_sf10t_orc}/shape/query64.groovy | 0 .../tpcds_sf10t_orc}/shape/query65.groovy | 0 .../tpcds_sf10t_orc}/shape/query66.groovy | 0 .../tpcds_sf10t_orc}/shape/query67.groovy | 0 .../tpcds_sf10t_orc}/shape/query68.groovy | 0 .../tpcds_sf10t_orc}/shape/query69.groovy | 0 .../tpcds_sf10t_orc}/shape/query7.groovy | 0 .../tpcds_sf10t_orc}/shape/query70.groovy | 0 .../tpcds_sf10t_orc}/shape/query71.groovy | 0 .../tpcds_sf10t_orc}/shape/query72.groovy | 0 .../tpcds_sf10t_orc}/shape/query73.groovy | 0 .../tpcds_sf10t_orc}/shape/query74.groovy | 0 .../tpcds_sf10t_orc}/shape/query75.groovy | 0 
.../tpcds_sf10t_orc}/shape/query76.groovy | 0 .../tpcds_sf10t_orc}/shape/query77.groovy | 0 .../tpcds_sf10t_orc}/shape/query78.groovy | 0 .../tpcds_sf10t_orc}/shape/query79.groovy | 0 .../tpcds_sf10t_orc}/shape/query8.groovy | 0 .../tpcds_sf10t_orc}/shape/query80.groovy | 0 .../tpcds_sf10t_orc}/shape/query81.groovy | 0 .../tpcds_sf10t_orc}/shape/query82.groovy | 0 .../tpcds_sf10t_orc}/shape/query83.groovy | 0 .../tpcds_sf10t_orc}/shape/query84.groovy | 0 .../tpcds_sf10t_orc}/shape/query85.groovy | 0 .../tpcds_sf10t_orc}/shape/query86.groovy | 0 .../tpcds_sf10t_orc}/shape/query87.groovy | 0 .../tpcds_sf10t_orc}/shape/query88.groovy | 0 .../tpcds_sf10t_orc}/shape/query89.groovy | 0 .../tpcds_sf10t_orc}/shape/query9.groovy | 0 .../tpcds_sf10t_orc}/shape/query90.groovy | 0 .../tpcds_sf10t_orc}/shape/query91.groovy | 0 .../tpcds_sf10t_orc}/shape/query92.groovy | 0 .../tpcds_sf10t_orc}/shape/query93.groovy | 0 .../tpcds_sf10t_orc}/shape/query94.groovy | 0 .../tpcds_sf10t_orc}/shape/query95.groovy | 0 .../tpcds_sf10t_orc}/shape/query96.groovy | 0 .../tpcds_sf10t_orc}/shape/query97.groovy | 0 .../tpcds_sf10t_orc}/shape/query98.groovy | 0 .../tpcds_sf10t_orc}/shape/query99.groovy | 0 .../tpch_sf1000/hint}/q10.groovy | 0 .../tpch_sf1000/hint}/q11.groovy | 0 .../tpch_sf1000/hint}/q12.groovy | 0 .../tpch_sf1000/hint}/q13.groovy | 0 .../tpch_sf1000/hint}/q14.groovy | 0 .../tpch_sf1000/hint}/q15.groovy | 0 .../tpch_sf1000/hint}/q17.groovy | 0 .../tpch_sf1000/hint}/q19.groovy | 0 .../tpch_sf1000/hint}/q3.groovy | 0 .../tpch_sf1000/hint}/q4.groovy | 0 .../tpch_sf1000/hint}/q5.groovy | 0 .../tpch_sf1000/hint}/q7.groovy | 0 .../tpch_sf1000/hint}/q8.groovy | 0 .../tpch_sf1000/hint}/q9.groovy | 0 .../tpch_sf1000}/load.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q1.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q10.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q11.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q12.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q13.groovy | 0 
.../tpch_sf1000}/nostats_rf_prune/q14.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q15.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q16.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q17.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q18.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q19.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q2.groovy | 0 .../nostats_rf_prune/q20-rewrite.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q20.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q21.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q22.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q3.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q4.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q5.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q6.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q7.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q8.groovy | 0 .../tpch_sf1000}/nostats_rf_prune/q9.groovy | 0 .../tpch_sf1000}/rf_prune/q1.groovy | 0 .../tpch_sf1000}/rf_prune/q10.groovy | 0 .../tpch_sf1000}/rf_prune/q11.groovy | 0 .../tpch_sf1000}/rf_prune/q12.groovy | 0 .../tpch_sf1000}/rf_prune/q13.groovy | 0 .../tpch_sf1000}/rf_prune/q14.groovy | 0 .../tpch_sf1000}/rf_prune/q15.groovy | 0 .../tpch_sf1000}/rf_prune/q16.groovy | 0 .../tpch_sf1000}/rf_prune/q17.groovy | 0 .../tpch_sf1000}/rf_prune/q18.groovy | 0 .../tpch_sf1000}/rf_prune/q19.groovy | 0 .../tpch_sf1000}/rf_prune/q2.groovy | 0 .../tpch_sf1000}/rf_prune/q20-rewrite.groovy | 0 .../tpch_sf1000}/rf_prune/q20.groovy | 0 .../tpch_sf1000}/rf_prune/q21.groovy | 0 .../tpch_sf1000}/rf_prune/q22.groovy | 0 .../tpch_sf1000}/rf_prune/q3.groovy | 0 .../tpch_sf1000}/rf_prune/q4.groovy | 0 .../tpch_sf1000}/rf_prune/q5.groovy | 0 .../tpch_sf1000}/rf_prune/q6.groovy | 0 .../tpch_sf1000}/rf_prune/q7.groovy | 0 .../tpch_sf1000}/rf_prune/q8.groovy | 0 .../tpch_sf1000}/rf_prune/q9.groovy | 0 .../runtime_filter/test_pushdown_setop.groovy | 0 .../tpch_sf1000}/shape/q1.groovy | 0 .../tpch_sf1000}/shape/q10.groovy | 0 .../tpch_sf1000}/shape/q11.groovy | 0 
.../tpch_sf1000}/shape/q12.groovy | 0 .../tpch_sf1000}/shape/q13.groovy | 0 .../tpch_sf1000}/shape/q14.groovy | 0 .../tpch_sf1000}/shape/q15.groovy | 0 .../tpch_sf1000}/shape/q16.groovy | 0 .../tpch_sf1000}/shape/q17.groovy | 0 .../tpch_sf1000}/shape/q18.groovy | 0 .../tpch_sf1000}/shape/q19.groovy | 0 .../tpch_sf1000}/shape/q2.groovy | 0 .../tpch_sf1000}/shape/q20-rewrite.groovy | 0 .../tpch_sf1000}/shape/q20.groovy | 0 .../tpch_sf1000}/shape/q21.groovy | 0 .../tpch_sf1000}/shape/q22.groovy | 0 .../tpch_sf1000}/shape/q3.groovy | 0 .../tpch_sf1000}/shape/q4.groovy | 0 .../tpch_sf1000}/shape/q5.groovy | 0 .../tpch_sf1000}/shape/q6.groovy | 0 .../tpch_sf1000}/shape/q7.groovy | 0 .../tpch_sf1000}/shape/q8.groovy | 0 .../tpch_sf1000}/shape/q9.groovy | 0 .../tpch_sf1000}/shape_no_stats/q1.groovy | 0 .../tpch_sf1000}/shape_no_stats/q10.groovy | 0 .../tpch_sf1000}/shape_no_stats/q11.groovy | 0 .../tpch_sf1000}/shape_no_stats/q12.groovy | 0 .../tpch_sf1000}/shape_no_stats/q13.groovy | 0 .../tpch_sf1000}/shape_no_stats/q14.groovy | 0 .../tpch_sf1000}/shape_no_stats/q15.groovy | 0 .../tpch_sf1000}/shape_no_stats/q16.groovy | 0 .../tpch_sf1000}/shape_no_stats/q17.groovy | 0 .../tpch_sf1000}/shape_no_stats/q18.groovy | 0 .../tpch_sf1000}/shape_no_stats/q19.groovy | 0 .../tpch_sf1000}/shape_no_stats/q2.groovy | 0 .../shape_no_stats/q20-rewrite.groovy | 0 .../tpch_sf1000}/shape_no_stats/q20.groovy | 0 .../tpch_sf1000}/shape_no_stats/q21.groovy | 0 .../tpch_sf1000}/shape_no_stats/q22.groovy | 0 .../tpch_sf1000}/shape_no_stats/q3.groovy | 0 .../tpch_sf1000}/shape_no_stats/q4.groovy | 0 .../tpch_sf1000}/shape_no_stats/q5.groovy | 0 .../tpch_sf1000}/shape_no_stats/q6.groovy | 0 .../tpch_sf1000}/shape_no_stats/q7.groovy | 0 .../tpch_sf1000}/shape_no_stats/q8.groovy | 0 .../tpch_sf1000}/shape_no_stats/q9.groovy | 0 3132 files changed, 12 insertions(+), 101353 deletions(-) delete mode 100644 regression-test/data/nereids_hint_tpch_p0/shape/q15.out delete mode 100644 
regression-test/data/new_shapes_p0/clickbench/query1.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query10.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query11.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query12.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query13.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query14.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query15.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query16.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query17.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query18.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query19.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query2.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query20.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query21.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query22.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query23.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query24.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query25.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query26.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query27.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query28.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query29.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query3.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query30.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query31.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query32.out delete mode 100644 
regression-test/data/new_shapes_p0/clickbench/query33.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query34.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query35.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query36.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query37.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query38.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query39.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query4.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query40.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query41.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query42.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query43.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query5.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query6.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query7.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query8.out delete mode 100644 regression-test/data/new_shapes_p0/clickbench/query9.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out delete mode 100644 
regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out delete mode 100644 regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out delete mode 100644 regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out delete mode 100644 
regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out 
delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out 
delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out delete mode 100644 
regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out delete mode 100644 regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out delete mode 100644 
regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out delete mode 
100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out delete mode 100644 
regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out delete mode 100644 
regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out delete mode 100644 regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out delete mode 100644 
regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query1.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query10.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query11.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query12.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query13.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query14.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query15.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query16.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query17.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query18.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query19.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query2.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query20.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query21.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query22.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query23.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query24.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query25.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => 
shape_check/clickbench}/query26.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query27.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query28.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query29.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query3.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query30.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query31.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query32.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query33.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query34.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query35.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query36.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query37.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query38.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query39.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query4.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query40.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query41.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query42.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query43.out (100%) 
rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query5.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query6.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query7.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query8.out (100%) rename regression-test/data/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query9.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/flat.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.1.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.2.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.3.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.1.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.2.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.3.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.1.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.2.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.3.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.4.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q4.1.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q4.2.out (100%) rename regression-test/data/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q4.3.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/constraints/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/noStatsRfPrune/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query29.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query3.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query34.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query38.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query4.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/shape => shape_check/tpcds_sf100/noStatsRfPrune}/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query5.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/noStatsRfPrune/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query55.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query65.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query68.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query80.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/noStatsRfPrune/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query87.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query89.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf100/noStatsRfPrune}/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query93.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query96.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query98.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/no_stats_shape/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query29.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query3.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query34.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query38.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query4.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/noStatsRfPrune => shape_check/tpcds_sf100/no_stats_shape}/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query5.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/no_stats_shape/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query55.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query65.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query68.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query80.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/no_stats_shape/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query87.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/shape => shape_check/tpcds_sf100/no_stats_shape}/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query93.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query96.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query98.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/rf_prune/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query29.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/shape => shape_check/tpcds_sf100/rf_prune}/query3.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query34.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query38.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query4.out (100%) 
rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/no_stats_shape => shape_check/tpcds_sf100/rf_prune}/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query5.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query55.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query65.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query68.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/rf_prune/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query80.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query87.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/noStatsRfPrune => shape_check/tpcds_sf100/rf_prune}/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query93.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query96.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query98.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/shape/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf100}/shape/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query29.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0/rf_prune => shape_check/tpcds_sf100/shape}/query3.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query34.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query38.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf100}/shape/query4.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/rf_prune => shape_check/tpcds_sf100/shape}/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/shape/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf100}/shape/query5.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf100}/shape/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query55.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query59.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0 => shape_check/tpcds_sf100}/shape/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/shape/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query65.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query68.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/shape/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape => shape_check/tpcds_sf100/shape}/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query80.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query87.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/no_stats_shape => shape_check/tpcds_sf100/shape}/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query93.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query96.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query98.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query44.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/bs_downgrade_shape}/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query54.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/bs_downgrade_shape}/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query68.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/shape => shape_check/tpcds_sf1000/bs_downgrade_shape}/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/bs_downgrade_shape/query91.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/bs_downgrade_shape}/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/eliminate_empty/query10_empty.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query1.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query10.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query11.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query12.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query13.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query14.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query15.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query16.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query17.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query18.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query19.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query2.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query20.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query21.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query22.out (100%) rename 
regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query23.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query24.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query25.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query26.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query27.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query28.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query29.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query3.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query30.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query31.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query32.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query33.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query34.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query35.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query36.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query37.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query38.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query39.out (100%) rename 
regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query4.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query40.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query41.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query42.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query43.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape => shape_check/tpcds_sf1000/hint}/query45.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query46.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query47.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query48.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query49.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query5.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query50.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query51.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query52.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query53.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query54.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query55.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape => shape_check/tpcds_sf1000/hint}/query56.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query57.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query58.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/shape => shape_check/tpcds_sf1000/hint}/query6.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query60.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query61.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query62.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query63.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query64.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query65.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query66.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query67.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query68.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query69.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query7.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query70.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query71.out (100%) 
rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query72.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query73.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query74.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query75.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query76.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query77.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query78.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query79.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query8.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query80.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query81.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query82.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query83.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query84.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query85.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query86.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query87.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query88.out (100%) rename 
regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0/rf_prune => shape_check/tpcds_sf1000/hint}/query9.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query90.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query91.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query92.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query93.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape => shape_check/tpcds_sf1000/hint}/query95.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query96.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query97.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query98.out (100%) rename regression-test/data/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/shape/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query19.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query29.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query3.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query34.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query38.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query4.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/shape/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query5.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query55.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query61.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query65.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query68.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/shape/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query80.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query87.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf1000}/shape/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query93.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query96.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query98.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query99.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query1.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query10.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query11.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query12.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query13.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query14.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query15.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query16.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query17.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query18.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query19.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query2.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query20.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query21.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query22.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query23.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query24.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query25.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query26.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query27.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query28.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query29.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query3.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query30.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query31.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query32.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query33.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query34.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query35.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query36.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query37.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query38.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query39.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query4.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query40.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query41.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query42.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query43.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query44.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query45.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query46.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query47.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query48.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query49.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query5.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query50.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query51.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query52.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query53.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query54.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query55.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query56.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query57.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query58.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query59.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query6.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query60.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query61.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query62.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query63.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query64.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query65.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query66.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query67.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query68.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query69.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query7.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query70.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query71.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query72.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query73.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query74.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query75.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query76.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query77.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query78.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query79.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query8.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query80.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query81.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query82.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query83.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query84.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query85.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query86.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query87.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query88.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query89.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query9.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query90.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query91.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query92.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query93.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query94.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query95.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query96.out (100%) rename 
regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query97.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query98.out (100%) rename regression-test/data/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query99.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q1.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q10.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q11.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q12.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q13.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q14.out (82%) rename regression-test/data/{new_shapes_p0/tpch_sf1000/nostats_rf_prune => shape_check/tpch_sf1000/hint}/q15.out (95%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q17.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q19.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q3.out (88%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q4.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q5.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q6.out (100%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q7.out (89%) rename regression-test/data/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q8.out (90%) rename regression-test/data/{nereids_hint_tpch_p0/shape => 
shape_check/tpch_sf1000/hint}/q9.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q1.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q10.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q11.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q12.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q13.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q14.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q15.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q16.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q17.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q18.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q19.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q2.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q20-rewrite.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q20.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q21.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q22.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => 
shape_check/tpch_sf1000}/nostats_rf_prune/q3.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q4.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q5.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q6.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q7.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q8.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q9.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q1.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q10.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q11.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q12.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q13.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q14.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q15.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q16.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q17.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q18.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q19.out (100%) rename 
regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q2.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q20-rewrite.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q20.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q21.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q22.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q3.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q4.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q5.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q6.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q7.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q8.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q9.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/runtime_filter/test_pushdown_setop.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q1.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q10.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q11.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q12.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q13.out (100%) rename 
regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q14.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q15.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q16.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q17.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q18.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q19.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q2.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q20-rewrite.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q20.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q21.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q22.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q3.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q4.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q5.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q6.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q7.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q8.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q9.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => 
shape_check/tpch_sf1000}/shape_no_stats/q1.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q10.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q11.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q12.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q13.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q14.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q15.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q16.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q17.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q18.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q19.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q2.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q20-rewrite.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q20.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q21.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q22.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q3.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => 
shape_check/tpch_sf1000}/shape_no_stats/q4.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q5.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q6.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q7.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q8.out (100%) rename regression-test/data/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q9.out (100%) delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query22.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/clickbench/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query25.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query26.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query28.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query35.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query43.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query5.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/clickbench/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/clickbench/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy delete mode 
100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy delete 
mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy delete mode 
100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy delete mode 100644 
regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy delete mode 100644 regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/load.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query1.groovy (100%) rename 
regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query10.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query11.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query12.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query13.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query14.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query15.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query16.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query17.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query18.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query19.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query2.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query20.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query21.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query22.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query23.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query24.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query25.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query26.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => 
shape_check/clickbench}/query27.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query28.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query29.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query3.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query30.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query31.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query32.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query33.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query34.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query35.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query36.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query37.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query38.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query39.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query4.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query40.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query41.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query42.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query43.groovy (100%) rename 
regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query5.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query6.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query7.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query8.groovy (100%) rename regression-test/suites/{nereids_clickbench_shape_p0 => shape_check/clickbench}/query9.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/load.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/flat.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.1.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.2.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q1.3.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.1.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.2.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q2.3.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.1.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.2.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.3.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q3.4.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q4.1.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => 
shape_check/ssb_sf100}/shape/q4.2.groovy (100%) rename regression-test/suites/{nereids_ssb_shape_sf100_p0 => shape_check/ssb_sf100}/shape/q4.3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/constraints/load.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/constraints/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/ddl/gen_rf_prune.py (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/ddl/gen_shape.py (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/ddl/rf_prune.tmpl (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/ddl/shape.tmpl (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/load.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query10.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query16.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query18.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query26.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query28.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query30.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query31.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query33.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query40.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query41.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query42.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query45.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query49.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query57.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query6.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query70.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query71.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query72.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query74.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query79.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query86.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query88.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query89.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query94.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/noStatsRfPrune/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query10.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query13.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query16.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query18.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query26.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query28.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query30.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query31.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query33.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query40.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query41.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query42.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query49.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query57.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query70.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query71.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query72.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query74.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query79.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query86.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query88.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query89.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query94.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/no_stats_shape/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query10.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query16.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query18.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/rf_prune/query26.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query28.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query30.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query31.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query33.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query40.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query41.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query42.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query49.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/rf_prune/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query57.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query70.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query71.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query72.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query74.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query79.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => 
shape_check/tpcds_sf100}/rf_prune/query86.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query88.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query89.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query94.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/rf_prune/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query10.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query16.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query18.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query26.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query28.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query30.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query31.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query33.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query40.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query41.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query42.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query49.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query57.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query70.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query71.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query72.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query74.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query79.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query86.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query88.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query89.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query94.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf100_p0 => shape_check/tpcds_sf100}/shape/tpcds_sf100_stats.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/bs_downgrade_shape/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/bs_downgrade_shape/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/ddl/gen_shape.py (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/ddl/shape.tmpl (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/eliminate_empty/query10_empty.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query1.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query10.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query11.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query12.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => 
shape_check/tpcds_sf1000/hint}/query13.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query14.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query15.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query16.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query17.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query18.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query19.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query2.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query20.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query21.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query22.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query23.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query24.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query25.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query26.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query27.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query28.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query29.groovy 
(100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query3.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query30.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query31.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query32.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query34.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query36.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query37.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query38.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query39.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query4.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query40.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query41.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query42.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query43.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query44.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query45.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query46.groovy (100%) rename 
regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query47.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query48.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query49.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query5.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query50.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query51.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query52.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query53.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query54.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query55.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query56.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query57.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query58.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query59.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query6.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query60.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query61.groovy (100%) rename 
regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query62.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query63.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query64.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query65.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query66.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query67.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query68.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query69.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query7.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query70.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query71.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query72.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query73.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query74.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query75.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query76.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query77.groovy (100%) rename 
regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query78.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query79.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query8.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query80.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query81.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query82.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query84.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query85.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query86.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query87.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query88.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query89.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query9.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query90.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query91.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query92.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query93.groovy (100%) rename 
regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query94.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query95.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query96.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query97.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query98.groovy (100%) rename regression-test/suites/{nereids_hint_tpcds_p0/shape => shape_check/tpcds_sf1000/hint}/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/load.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query10.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query16.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query18.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query26.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query28.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query30.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query31.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query33.groovy (100%) 
rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query40.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query41.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query42.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query49.groovy 
(100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query57.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query61.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => 
shape_check/tpcds_sf1000}/shape/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query70.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query71.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query72.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query74.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query79.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query86.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query88.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query89.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query91.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query94.groovy (100%) 
rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf1000_p0 => shape_check/tpcds_sf1000}/shape/query99.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/ddl/gen_shape.py (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/ddl/shape.tmpl (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/load.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query1.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query10.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query11.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query12.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query13.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query14.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query15.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query16.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => 
shape_check/tpcds_sf10t_orc}/shape/query17.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query18.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query19.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query2.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query20.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query21.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query22.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query23.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query24.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query25.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query26.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query27.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query28.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query29.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query3.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query30.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query31.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query32.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query33.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query34.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query35.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query36.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query37.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query38.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query39.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query4.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query40.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query41.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query42.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query43.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query44.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query45.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query46.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => 
shape_check/tpcds_sf10t_orc}/shape/query47.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query48.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query49.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query5.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query50.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query51.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query52.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query53.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query54.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query55.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query56.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query57.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query58.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query59.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query6.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query60.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query61.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query62.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query63.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query64.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query65.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query66.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query67.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query68.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query69.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query7.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query70.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query71.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query72.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query73.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query74.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query75.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query76.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => 
shape_check/tpcds_sf10t_orc}/shape/query77.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query78.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query79.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query8.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query80.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query81.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query82.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query83.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query84.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query85.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query86.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query87.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query88.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query89.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query9.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query90.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query91.groovy (100%) rename 
regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query92.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query93.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query94.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query95.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query96.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query97.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query98.groovy (100%) rename regression-test/suites/{nereids_tpcds_shape_sf10t_orc => shape_check/tpcds_sf10t_orc}/shape/query99.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q10.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q11.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q12.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q13.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q14.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q15.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q17.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q19.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q3.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape 
=> shape_check/tpch_sf1000/hint}/q4.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q5.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q7.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q8.groovy (100%) rename regression-test/suites/{nereids_hint_tpch_p0/shape => shape_check/tpch_sf1000/hint}/q9.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/load.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q1.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q10.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q11.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q12.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q13.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q14.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q15.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q16.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q17.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q18.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q19.groovy (100%) rename 
regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q2.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q20-rewrite.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q20.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q21.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q22.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q3.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q4.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q5.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q6.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q7.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q8.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/nostats_rf_prune/q9.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q1.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q10.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q11.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q12.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => 
shape_check/tpch_sf1000}/rf_prune/q13.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q14.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q15.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q16.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q17.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q18.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q19.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q2.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q20-rewrite.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q20.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q21.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q22.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q3.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q4.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q5.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q6.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q7.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => 
shape_check/tpch_sf1000}/rf_prune/q8.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/rf_prune/q9.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/runtime_filter/test_pushdown_setop.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q1.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q10.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q11.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q12.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q13.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q14.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q15.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q16.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q17.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q18.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q19.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q2.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q20-rewrite.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q20.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q21.groovy (100%) rename 
regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q22.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q3.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q4.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q5.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q6.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q7.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q8.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape/q9.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q1.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q10.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q11.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q12.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q13.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q14.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q15.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q16.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q17.groovy (100%) rename 
regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q18.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q19.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q2.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q20-rewrite.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q20.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q21.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q22.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q3.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q4.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q5.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q6.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q7.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q8.groovy (100%) rename regression-test/suites/{nereids_tpch_shape_sf1000_p0 => shape_check/tpch_sf1000}/shape_no_stats/q9.groovy (100%) diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q15.out b/regression-test/data/nereids_hint_tpch_p0/shape/q15.out deleted file mode 100644 index a88c5e699bd99d..00000000000000 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q15.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(supplier revenue0 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/clickbench/query1.out b/regression-test/data/new_shapes_p0/clickbench/query1.out deleted file mode 100644 index f98c53e3d5fc4e..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query1.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_1 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalStorageLayerAggregate[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query10.out b/regression-test/data/new_shapes_p0/clickbench/query10.out deleted file mode 100644 index c784056436912a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query10.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query11.out b/regression-test/data/new_shapes_p0/clickbench/query11.out deleted file mode 100644 index 4b5e4486d3f4cc..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query11.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_11 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (MobilePhoneModel = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query12.out b/regression-test/data/new_shapes_p0/clickbench/query12.out deleted file mode 100644 index 10928363a83c02..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query12.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (MobilePhoneModel = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query13.out b/regression-test/data/new_shapes_p0/clickbench/query13.out deleted file mode 100644 index ce6675dc3bb26e..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query13.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_13 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query14.out b/regression-test/data/new_shapes_p0/clickbench/query14.out deleted file mode 100644 index 35eedce41b927a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query14.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_14 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query15.out b/regression-test/data/new_shapes_p0/clickbench/query15.out deleted file mode 100644 index bf7f267f0e47be..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query15.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query16.out b/regression-test/data/new_shapes_p0/clickbench/query16.out deleted file mode 100644 index a229f5310dfc2d..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query16.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_16 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query17.out b/regression-test/data/new_shapes_p0/clickbench/query17.out deleted file mode 100644 index 78635481d04652..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query17.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query18.out b/regression-test/data/new_shapes_p0/clickbench/query18.out deleted file mode 100644 index 6af4a027d886c3..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query18.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query19.out b/regression-test/data/new_shapes_p0/clickbench/query19.out deleted file mode 100644 index 7540225b393218..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query19.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query2.out b/regression-test/data/new_shapes_p0/clickbench/query2.out deleted file mode 100644 index 4f4565a083c67b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query2.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_2 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter(( not (AdvEngineID = 0))) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query20.out b/regression-test/data/new_shapes_p0/clickbench/query20.out deleted file mode 100644 index 51f1da68b40a5b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query20.out +++ /dev/null @@ -1,8 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_20 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------filter((hits.UserID = 435090932899640449)) ---------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query21.out b/regression-test/data/new_shapes_p0/clickbench/query21.out deleted file mode 100644 index 104d1b4710532a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query21.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_21 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((URL like '%google%')) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query22.out b/regression-test/data/new_shapes_p0/clickbench/query22.out deleted file mode 100644 index d5274c3548eb28..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query22.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = '')) and (URL like '%google%')) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query23.out b/regression-test/data/new_shapes_p0/clickbench/query23.out deleted file mode 100644 index 76a91b3ad49968..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query23.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_23 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = '')) and ( not (URL like '%.google.%')) and (Title like '%Google%')) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query24.out b/regression-test/data/new_shapes_p0/clickbench/query24.out deleted file mode 100644 index fd0a2f5b670727..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query24.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_24 -- -PhysicalDeferMaterializeResultSink ---PhysicalDeferMaterializeTopN -----PhysicalDistribute[DistributionSpecGather] -------PhysicalDeferMaterializeTopN ---------filter((URL like '%google%')) -----------PhysicalDeferMaterializeOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query25.out b/regression-test/data/new_shapes_p0/clickbench/query25.out deleted file mode 100644 index 271149db672442..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query25.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_25 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter(( not (SearchPhrase = ''))) ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query26.out b/regression-test/data/new_shapes_p0/clickbench/query26.out deleted file mode 100644 index 7317f810a3bb23..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query26.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(( not (SearchPhrase = ''))) -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query27.out b/regression-test/data/new_shapes_p0/clickbench/query27.out deleted file mode 100644 index 1dbae1e0dc1a8c..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query27.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_27 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter(( not (SearchPhrase = ''))) ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query28.out b/regression-test/data/new_shapes_p0/clickbench/query28.out deleted file mode 100644 index e5cb28eab7aa2b..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query28.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_28 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((c > 100000)) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(( not (URL = ''))) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query29.out b/regression-test/data/new_shapes_p0/clickbench/query29.out deleted file mode 100644 index 01e642b5b4339f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query29.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((c > 100000)) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(( not (Referer = ''))) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query3.out b/regression-test/data/new_shapes_p0/clickbench/query3.out deleted file mode 100644 index d4fb562b4fdb3f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query3.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_3 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query30.out b/regression-test/data/new_shapes_p0/clickbench/query30.out deleted file mode 100644 index bad1a26f517088..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query30.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_30 -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query31.out b/regression-test/data/new_shapes_p0/clickbench/query31.out deleted file mode 100644 index a662fac4ef4581..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query31.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_31 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query32.out b/regression-test/data/new_shapes_p0/clickbench/query32.out deleted file mode 100644 index 29828472ccab40..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query32.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_32 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (SearchPhrase = ''))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query33.out b/regression-test/data/new_shapes_p0/clickbench/query33.out deleted file mode 100644 index f47fe46b46005a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query33.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query34.out b/regression-test/data/new_shapes_p0/clickbench/query34.out deleted file mode 100644 index c2b2ed43e72e9f..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query34.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_34 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query35.out b/regression-test/data/new_shapes_p0/clickbench/query35.out deleted file mode 100644 index 12617f3936158c..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query35.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query36.out b/regression-test/data/new_shapes_p0/clickbench/query36.out deleted file mode 100644 index 2d49c7645c7528..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query36.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_36 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query37.out b/regression-test/data/new_shapes_p0/clickbench/query37.out deleted file mode 100644 index 757b4f64df4e98..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query37.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (URL = '')) and (hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query38.out b/regression-test/data/new_shapes_p0/clickbench/query38.out deleted file mode 100644 index 37d0392a7b2490..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query38.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_38 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (Title = '')) and (hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query39.out b/regression-test/data/new_shapes_p0/clickbench/query39.out deleted file mode 100644 index 89222c0f0d2abe..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query39.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_39 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (IsLink = 0)) and (hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsDownload = 0) and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query4.out b/regression-test/data/new_shapes_p0/clickbench/query4.out deleted file mode 100644 index 966b6cb7ecca97..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query4.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_4 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query40.out b/regression-test/data/new_shapes_p0/clickbench/query40.out deleted file mode 100644 index d0f5babf3275e1..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query40.out +++ /dev/null @@ -1,14 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0)) ---------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query41.out b/regression-test/data/new_shapes_p0/clickbench/query41.out deleted file mode 100644 index 8a7019e5969e79..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query41.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0) and (hits.RefererHash = 3594120000172545465) and TraficSourceID IN (-1, 6)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query42.out b/regression-test/data/new_shapes_p0/clickbench/query42.out deleted file mode 100644 index b4e8bce045c9c8..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query42.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-31') and (hits.EventDate >= '2013-07-01') and (hits.IsRefresh = 0) and (hits.URLHash = 2868770270353813622)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query43.out b/regression-test/data/new_shapes_p0/clickbench/query43.out deleted file mode 100644 index 80e197103fc1fb..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query43.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((hits.CounterID = 62) and (hits.DontCountHits = 0) and (hits.EventDate <= '2013-07-15') and (hits.EventDate >= '2013-07-14') and (hits.IsRefresh = 0)) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query5.out b/regression-test/data/new_shapes_p0/clickbench/query5.out deleted file mode 100644 index 94b8f2ad28f023..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query5.out +++ /dev/null @@ -1,11 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_5 -- -PhysicalResultSink ---hashAgg[DISTINCT_GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[DISTINCT_LOCAL] ---------hashAgg[GLOBAL] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query6.out b/regression-test/data/new_shapes_p0/clickbench/query6.out deleted file mode 100644 index 75ba24ac143f06..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query6.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_6 -- -PhysicalResultSink ---hashAgg[DISTINCT_GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[DISTINCT_LOCAL] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query7.out b/regression-test/data/new_shapes_p0/clickbench/query7.out deleted file mode 100644 index 565f0c0f71985a..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query7.out +++ /dev/null @@ -1,9 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_7 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalStorageLayerAggregate[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query8.out b/regression-test/data/new_shapes_p0/clickbench/query8.out deleted file mode 100644 index 5bebd9361a2ca6..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query8.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ckbench_shape_8 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(( not (AdvEngineID = 0))) -------------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/clickbench/query9.out b/regression-test/data/new_shapes_p0/clickbench/query9.out deleted file mode 100644 index dcece9f0ce72d7..00000000000000 --- a/regression-test/data/new_shapes_p0/clickbench/query9.out +++ /dev/null @@ -1,12 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ckbench_shape_9 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalOlapScan[hits] - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out deleted file mode 100644 index 401b9bd4b037c9..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query1.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF3 ctr_store_sk->[ctr_store_sk] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ctr_customer_sk->[c_customer_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 
-----------------------PhysicalProject -------------------------filter((store.s_state = 'TN')) ---------------------------PhysicalOlapScan[store] - -Hint log: -Used: leading(store_returns broadcast date_dim ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out deleted file mode 100644 index 11fb1e7c9be3e6..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query24.out +++ /dev/null @@ -1,56 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk];RF3 ca_zip->[s_zip] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: 
RF1 RF2 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 5)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------filter((ssales.i_color = 'aquamarine')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - -Hint log: -Used: leading(store_sales broadcast store shuffle { customer shuffle customer_address } shuffle item shuffle store_returns ) -UnUsed: -SyntaxError: - diff --git 
a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out deleted file mode 100644 index 26a67aa0d6e85a..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query64.out +++ /dev/null @@ -1,106 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF19 cs_item_sk->[i_item_sk,sr_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF18 p_promo_sk->[ss_promo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF15 ib_income_band_sk->[hd_income_band_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF14 ca_address_sk->[c_current_addr_sk] 
-------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[c_current_hdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF12 ca_address_sk->[ss_addr_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF10 sr_item_sk->[i_item_sk,ss_item_sk];RF11 sr_ticket_number->[ss_ticket_number] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ss_item_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[c_first_shipto_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF6 cd_demo_sk->[ss_cdemo_sk] 
---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] -------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF8 RF9 RF10 RF11 RF12 RF16 RF17 RF18 RF19 -------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[c_first_sales_date_sk] ---------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF7 RF13 RF14 ---------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[customer_demographics] 
-----------------------------------------------------------PhysicalProject -------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((item.i_current_price <= 58.00) and (item.i_current_price >= 49.00) and i_color IN ('blush', 'lace', 'lawn', 'misty', 'orange', 'pink')) -----------------------------------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF19 -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF2 ib_income_band_sk->[hd_income_band_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF2 ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[income_band] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF19 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF15 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_address] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[income_band] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1999, 2000)) 
---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[promotion] -----------------PhysicalProject -------------------filter((sale > (2 * refund))) ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 1999)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - -Hint log: -Used: leading(catalog_sales shuffle catalog_returns ) leading({ store_sales { { customer d2 } cd2 } } cd1 d3 item { hd1 ib1 } store_returns ad1 hd2 ad2 ib2 d1 store promotion cs_ui ) 
leading(cs1 shuffle cs2 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out deleted file mode 100644 index e93c8687236c29..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query67.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq 
<= 1228) and (date_dim.d_month_seq >= 1217)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - -Hint log: -Used: leading(store_sales broadcast date_dim broadcast store broadcast item ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out deleted file mode 100644 index 33ba178690ab03..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query72.out +++ /dev/null @@ -1,59 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF10 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF8 d_date_sk->[inv_date_sk];RF9 cs_item_sk->[inv_item_sk] -----------------------PhysicalOlapScan[inventory] apply RFs: RF8 RF9 RF10 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF7 d_week_seq->[d_week_seq] ---------------------------PhysicalProject 
-----------------------------hashJoin[RIGHT_OUTER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() build RFs:RF5 cs_order_number->[cr_order_number];RF6 cs_item_sk->[cr_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF5 RF6 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk) and (catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk];RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 
---------------------------------------------------PhysicalProject -----------------------------------------------------NestedLoopJoin[INNER_JOIN](d3.d_date > days_add(d_date, INTERVAL 5 DAY)) -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((d1.d_year = 1998)) -----------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -----------------------------------------------PhysicalProject -------------------------------------------------filter((household_demographics.hd_buy_potential = '1001-5000')) ---------------------------------------------------PhysicalOlapScan[household_demographics] -------------------------------------------PhysicalProject ---------------------------------------------filter((customer_demographics.cd_marital_status = 'S')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[promotion] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - -Hint log: -Used: leading(inventory shuffle { catalog_returns shuffle { catalog_sales shuffle { d3 broadcast d1 } broadcast household_demographics shuffle customer_demographics broadcast promotion shuffle item } broadcast d2 } broadcast warehouse ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out b/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out deleted file mode 100644 index 
e57834b15ff42a..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpcds/shape/query78.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_returns] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_returns] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject 
---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_returns] - -Hint log: -Used: leading(web_sales broadcast date_dim web_returns ) leading(catalog_sales broadcast date_dim catalog_returns ) leading(store_sales broadcast date_dim store_returns ) leading(ss shuffle ws shuffle cs ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out deleted file mode 100644 index 15d56e664547d1..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q10.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((lineitem.l_returnflag = 'R')) -----------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= 
'1993-10-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(lineitem shuffle { { customer shuffle orders } broadcast nation } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out deleted file mode 100644 index c37989eb189371..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q11.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[supplier] -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(partsupp { supplier nation } ) leading(partsupp { supplier nation } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out deleted file mode 100644 index a8710941069079..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q12.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(orders lineitem ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out deleted file mode 100644 index 99b5297e0d6a0d..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q13.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - -Hint log: -Used: leading(orders shuffle customer ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out deleted file mode 100644 index 3633709f96fa8a..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q14.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[part] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(part lineitem ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out deleted file mode 100644 index a88c5e699bd99d..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q15.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -------------hashAgg[GLOBAL] 
---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - -Hint log: -Used: leading(supplier revenue0 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out deleted file mode 100644 index a84853ea177561..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q17.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - -Hint log: -Used: leading(lineitem broadcast part ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out 
b/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out deleted file mode 100644 index ebd4f59d682f8f..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q19.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - -Hint log: -Used: leading(lineitem broadcast part ) -UnUsed: 
-SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out deleted file mode 100644 index b284ef9355077d..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q3.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - -Hint log: -Used: leading(lineitem { orders shuffle customer } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out deleted file mode 100644 index 94b49c830c4b45..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q4.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - -Hint log: -Used: leading(lineitem orders ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out deleted file mode 100644 index 1c001e63b89cfe..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q5.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - -Hint log: -Used: 
leading(lineitem orders broadcast { supplier broadcast { nation broadcast region } } shuffle customer ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out deleted file mode 100644 index 919b8547bb8a69..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q7.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) ---------------------------PhysicalOlapScan[lineitem] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) 
otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] ---------------------------PhysicalProject -----------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(lineitem broadcast { supplier broadcast n1 } { orders shuffle { customer broadcast n2 } } ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out deleted file mode 100644 index 486e40152fb644..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q8.out +++ /dev/null @@ -1,49 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - -Hint log: -Used: leading(supplier { orders { lineitem broadcast part } { customer broadcast { n1 broadcast region } } } broadcast n2 ) -UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out b/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out deleted file mode 100644 index 42e3f4eb2072cd..00000000000000 --- a/regression-test/data/new_shapes_p0/hint_tpch/shape/q9.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - -Hint log: -Used: leading(orders shuffle { lineitem shuffle part } shuffle { supplier broadcast nation } shuffle partsupp ) 
-UnUsed: -SyntaxError: - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out deleted file mode 100644 index 3a180194ef57b5..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/flat.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((s.s_suppkey = l.lo_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[lo_suppkey] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_custkey = l.lo_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((p.p_partkey = l.lo_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 -----------------PhysicalOlapScan[part] -------------PhysicalOlapScan[customer] ---------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out deleted file mode 100644 index de05bf1c72f84f..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.1.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 3) and (lineorder.lo_discount >= 1) and (lineorder.lo_quantity < 25)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_year = 1993)) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out deleted file mode 100644 index a43ea0bce0fbf6..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.2.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 6) and (lineorder.lo_discount >= 4) and (lineorder.lo_quantity <= 35) and (lineorder.lo_quantity >= 26)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_yearmonth = 'Jan1994')) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out deleted file mode 100644 index 7775cb114f7e58..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q1.3.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF0 d_datekey->[lo_orderdate] -------------PhysicalProject ---------------filter((lineorder.lo_discount <= 7) and (lineorder.lo_discount >= 5) and (lineorder.lo_quantity <= 35) and (lineorder.lo_quantity >= 26)) -----------------PhysicalOlapScan[lineorder] apply RFs: RF0 -------------PhysicalProject ---------------filter((dates.d_weeknuminyear = 6) and (dates.d_year = 1994)) -----------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out deleted file mode 100644 index c1f86cac10185d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.1.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[lo_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((part.p_category = 'MFGR#12')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter((supplier.s_region = 'AMERICA')) ---------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out deleted file mode 100644 index 5b7b82f23355a2..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.2.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[lo_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_region = 'ASIA')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((part.p_brand <= 'MFGR#2228') and (part.p_brand >= 'MFGR#2221')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out deleted file mode 100644 index 0523fe55e5bbc9..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q2.3.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[lo_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((part.p_brand = 'MFGR#2239')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter((supplier.s_region = 'EUROPE')) ---------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out deleted file mode 100644 index 40096b292e84e7..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.1.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_region = 'ASIA')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((customer.c_region = 'ASIA')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out deleted file mode 100644 index 7d0b454caac190..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.2.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((supplier.s_nation = 'UNITED STATES')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter((customer.c_nation = 'UNITED STATES')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out deleted file mode 100644 index 628f3df9a5831a..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.3.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter(s_city IN ('UNITED KI1', 'UNITED KI5')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter(c_city IN ('UNITED KI1', 'UNITED KI5')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_year <= 1997) and (dates.d_year >= 1992)) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out deleted file mode 100644 index f725ccdbc2c1f7..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q3.4.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter(s_city IN ('UNITED KI1', 'UNITED KI5')) -------------------------------PhysicalOlapScan[supplier] -----------------------PhysicalProject -------------------------filter(c_city IN ('UNITED KI1', 'UNITED KI5')) ---------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((dates.d_yearmonth = 'Dec1997')) -----------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out deleted file mode 100644 index 63d8d12e64f165..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.1.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF3 d_datekey->[lo_orderdate] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF2 p_partkey->[lo_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF1 c_custkey->[lo_custkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_region = 'AMERICA')) -----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter((customer.c_region = 'AMERICA')) -------------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------filter(p_mfgr IN ('MFGR#1', 'MFGR#2')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out 
deleted file mode 100644 index efc1e0061ed88d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.2.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF3 p_partkey->[lo_partkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF2 c_custkey->[lo_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF1 d_datekey->[lo_orderdate] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_region = 'AMERICA')) -----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1997, 1998)) -------------------------------PhysicalOlapScan[dates] -----------------------PhysicalProject -------------------------filter((customer.c_region = 'AMERICA')) ---------------------------PhysicalOlapScan[customer] 
-------------------PhysicalProject ---------------------filter(p_mfgr IN ('MFGR#1', 'MFGR#2')) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out b/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out deleted file mode 100644 index 5bd9ad4782cb3d..00000000000000 --- a/regression-test/data/new_shapes_p0/ssb_sf100/shape/q4.3.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineorder.lo_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 lo_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_orderdate = dates.d_datekey)) otherCondition=() build RFs:RF2 d_datekey->[lo_orderdate] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[lo_partkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineorder.lo_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[lo_suppkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineorder] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((supplier.s_nation = 'UNITED STATES')) 
-----------------------------------PhysicalOlapScan[supplier] ---------------------------PhysicalProject -----------------------------filter((part.p_category = 'MFGR#14')) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------filter(d_year IN (1997, 1998)) ---------------------------PhysicalOlapScan[dates] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out deleted file mode 100644 index 923ee0e2966746..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/constraints/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) 
as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------------PhysicalProject ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 item_sk->[ws_item_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 RF8 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out deleted file mode 100644 index 
64854df932af65..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ctr_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((store.s_state = 'SD')) -----------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] 
-----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out deleted file mode 100644 index 5fb40519b131d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) -------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] 
apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out deleted file mode 100644 index 8cab83d94f65ac..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and 
(t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out deleted file mode 100644 index b4c126ae67aebf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -------------------------------PhysicalOlapScan[date_dim] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out deleted file mode 100644 index 7b5d0d01ba6a17..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 
100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject 
---------------filter((date_dim.d_year = 2001)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out deleted file mode 100644 index 10192bf86cb782..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) 
otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject 
-------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 -----------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF15 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) 
otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() 
-------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out deleted file mode 100644 index 5825559155b8e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out deleted file mode 100644 index c6e88456a7e402..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[cs_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF0 cs_order_number->[cs_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] 
-----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'WV')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out deleted file mode 100644 index c10cc616923d3c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_quarter_name = '2001Q1')) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out deleted file mode 100644 index dcda7d5d7eb3ee..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cs_bill_customer_sk] 
---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF5 -------------------------------------------PhysicalProject ---------------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1998)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out deleted file mode 100644 index 4e2627d552dd1e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 2)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out deleted file mode 100644 index 8fac9bc6bbbd76..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] 
hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 ---------------PhysicalProject -----------------filter((date_dim.d_year = 1998)) -------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out deleted file mode 100644 index fa360d9c6fdc83..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out deleted file mode 100644 index db506f0acaa0e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) 
otherCondition=() -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[warehouse] -------------------------PhysicalProject ---------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out deleted file mode 100644 index 7f10ebd7894ce7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject 
-------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out deleted file mode 100644 index c5d202bfee2bc3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) 
-----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 
-------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF7 ws_item_sk->[item_sk] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out deleted file mode 100644 index ace0813e2e7117..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_zip->[ca_zip];RF6 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF4 ca_address_sk->[c_current_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF6 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------PhysicalProject 
-----------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------PhysicalProject -------------------filter((store.s_market_id = 8)) ---------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out deleted file mode 100644 index 80edfc46e41f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = 
store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out deleted file mode 100644 index 52f628f8b600a2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[cs_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
-----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out deleted file mode 100644 index 886eca75570635..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and 
(customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 
17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 
13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out deleted file mode 100644 index b09148bd528c7b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = 
store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF6 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(d_year IN (1999, 2000, 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out deleted file mode 100644 index 8beaf9b74953fb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 11)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manufact_id = 816)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out deleted file mode 100644 index 6f1b848ed00034..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[wr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] 
apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'IN')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out deleted file mode 100644 index 6c244739da9ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -------------------PhysicalProject ---------------------PhysicalOlapScan[customer_address] ---------------PhysicalProject -----------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject 
---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF3 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county,ca_county,ca_county,ca_county] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF7 ca_county->[ca_county,ca_county,ca_county] ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF6 ca_county->[ca_county,ca_county] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] 
---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 RF7 RF8 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) ---------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF7 RF8 ---------------------PhysicalProject -----------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out deleted file mode 100644 index 0416c3f39cb340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF0 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 
'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF4 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF8 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out deleted file mode 100644 index 15e7650bae15c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject 
-----------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out deleted file mode 100644 index 6def6ef536b340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out deleted file mode 100644 index 5e091a8245be48..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out deleted file mode 100644 index 2dba5f8dad05c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and 
i_manufact_id IN (1000, 707, 747, 856)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out deleted file mode 100644 index b0bda7f927c138..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] 
-----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out deleted file mode 100644 index d906073878075f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.out +++ 
/dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalOlapScan[inventory] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out deleted file mode 100644 index a083e5a72ef86a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] 
-------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id,customer_id,customer_id,customer_id] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / 
year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF7 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF7 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out deleted file mode 100644 index 1465471817388b..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) 
-----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category 
= 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out deleted file mode 100644 index 8b26911e6afcaa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((item.i_manager_id = 1)) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out deleted file mode 100644 index c2cc91b7f43043..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -------------PhysicalProject ---------------PhysicalOlapScan[item] apply RFs: RF1 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out deleted file mode 100644 index 7270fe9092a53b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject 
-------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out deleted file mode 100644 index 37d045d1ebc4a1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five 
Points', 'Liberty', 'Oak Grove')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out deleted file mode 100644 index 77dcc9357f98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 
2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out deleted file mode 100644 index aef568a41842bb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 
'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -------------------------PhysicalOlapScan[customer_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 1999)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out deleted file mode 100644 index 0db3fa841189b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] 
-----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF0 wr_order_number->[ws_order_number];RF1 wr_item_sk->[ws_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[web_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow 
-----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF3 cr_order_number->[cs_order_number];RF4 cr_item_sk->[cs_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[catalog_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) 
---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF6 sr_ticket_number->[ss_ticket_number];RF7 sr_item_sk->[ss_item_sk] 
---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[store_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out deleted file mode 100644 index 5187455761becc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_page] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build 
RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out deleted file mode 100644 index 8bdf05df39ed98..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[sr_returned_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk];RF2 sr_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalWindow 
-------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out deleted file mode 100644 index 15a333bc25275d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 1)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out deleted file mode 100644 index 89dc632eb527c4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu 
packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out deleted file mode 100644 index de4dbe32cd1f80..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.out +++ /dev/null @@ -1,72 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject 
---------------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] -------------------------------------------------------------PhysicalUnion ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -----------------------------------------------------------------PhysicalOlapScan[item] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) 
-------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[customer] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer_address] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalAssertNumRows ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out deleted file mode 100644 index 20d097ea52f2e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 100)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out deleted file mode 100644 index ef5a9edba623fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out deleted file mode 100644 index 96d8f68090e5de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject 
-----------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category];RF8 i_brand->[i_brand];RF9 cc_name->[cc_name];RF10 rn->[(rn - 1)] -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out deleted file mode 100644 index 
5692578bb7238c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 
d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] 
-----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date = '2001-03-24')) -----------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out deleted file mode 100644 index d9b1a3ef9f8baa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] 
-----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out deleted file mode 100644 index 55144c3f93417e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF4 d_month_seq->[d_month_seq] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalAssertNumRows -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out deleted file mode 100644 index 
403d74c71ecae1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: 
RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject 
-------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out deleted file mode 100644 index 62da8c9cb21a0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 RF10 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------------------PhysicalProject -------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------filter((store.s_gmt_offset = -7.00)) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out deleted file mode 100644 index 
582c27536d2a1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_site] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out deleted file mode 100644 index 9653f6c52199aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 
'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out deleted file mode 100644 index ac5d0d6d739e29..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF19 i_item_sk->[cr_item_sk,cs_item_sk,sr_item_sk,ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) ---------------------------------------------PhysicalProject 
-----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = 
store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF6 RF19 -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF19 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF19 ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF19 
---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[customer] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter(d_year IN (2001, 2002)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[store] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[income_band] -----------------PhysicalProject 
-------------------PhysicalOlapScan[income_band] -------------PhysicalProject ---------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -----------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out deleted file mode 100644 index c19f18d7bfa1b6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out deleted file mode 100644 index 596cb44922b54a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF3 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF7 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out deleted file mode 100644 index 2370bce7b8f785..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out deleted file mode 100644 index d645b44a1efaa7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject 
---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out deleted file mode 100644 index e0bbeea823735d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -----------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] 
apply RFs: RF5 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out deleted file mode 100644 index f47da720468166..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
-----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out deleted file mode 100644 index 9c9d7b7638d6a5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() 
-----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store] -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) ---------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out deleted file mode 100644 index 7ae1c5b71ddaf1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out deleted file mode 100644 index cb51c1c09a0c37..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk) and (d1.d_week_seq = d2.d_week_seq)) otherCondition=((d3.d_date > cast((cast(d_date as BIGINT) + 5) as DATEV2))) build RFs:RF7 d_week_seq->[d_week_seq];RF8 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[inv_date_sk] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) -----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF4 -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF6 RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------------------PhysicalProject 
-----------------------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_buy_potential = '501-1000')) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((d1.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out deleted file mode 100644 index 8cfa2b3c64b2b7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / 
cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 'Unknown')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out deleted file mode 100644 index 6915274e1a1301..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) 
-----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out deleted file mode 100644 index 7a6c63c2385f24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.out +++ /dev/null @@ -1,78 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) 
-----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ws_sold_date_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF6 i_brand_id->[i_brand_id];RF7 i_class_id->[i_class_id];RF8 i_category_id->[i_category_id];RF9 i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 RF8 RF9 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out deleted file mode 100644 index 2f21640b079929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out deleted file mode 100644 index 3f4330d7466b08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] 
-----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] 
hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out deleted file mode 100644 index 8f9b721f08ee59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out deleted file mode 100644 index 40a27a12ac0692..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject 
-----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', 
'45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] 
-----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', 
'48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out deleted file mode 100644 index 3f2cb3c7fcea3a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = 
store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_returns] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[catalog_page] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject 
-------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF11 p_promo_sk->[ws_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF10 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_site] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 
50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out deleted file mode 100644 index 8e795ffc2e0eda..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return 
as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'CA')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out deleted file mode 100644 index 7c415fed511e6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out deleted file mode 100644 index d069a28a6b032d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.out +++ /dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[sr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) 
otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[wr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', 
'2001-11-11')) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out deleted file mode 100644 index 78a1b815e9e0f9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF3 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[household_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) ---------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out deleted file mode 100644 index 63fc92a6f489d9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 
100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF7 ca_address_sk->[wr_refunded_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[wr_returning_cdemo_sk];RF5 cd_marital_status->[cd_marital_status];RF6 cd_education_status->[cd_education_status] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF0 ws_item_sk->[wr_item_sk];RF1 ws_order_number->[wr_order_number] ---------------------------------------------PhysicalProject 
-----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 RF3 RF4 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_page] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -----------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 ---------------------------------PhysicalProject -----------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out deleted file mode 100644 index 2cb80b5a081379..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out deleted file mode 100644 index 431debd06c471b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out deleted file mode 100644 index ae9b03a84ef7de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF22 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF21 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) 
-----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF19 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF18 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) 
-------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF16 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF15 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF13 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF12 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF10 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF7 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ss_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and 
OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -------------------------PhysicalOlapScan[time_dim] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject 
---------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out deleted file mode 100644 index 2e9294bce91fd2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = 
item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out deleted file mode 100644 index 1f880a462795bc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out deleted file mode 100644 index 6a4b369a5fe1ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF5 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cr_returning_customer_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF1 RF2 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[call_center] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((hd_buy_potential like '1001-5000%')) -------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out deleted file mode 100644 index 77175ca96ff6bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF2 r_reason_sk->[sr_reason_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF0 sr_item_sk->[ss_item_sk];RF1 sr_ticket_number->[ss_ticket_number] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -------------------PhysicalProject ---------------------filter((reason.r_reason_desc = 'duplicate purchase')) -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out deleted file mode 100644 index 75a20cb4e35006..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------filter((customer_address.ca_state = 'OK')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((web_site.web_company_name = 'pri')) -----------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out deleted file mode 100644 index fce09b1b6046e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF6 web_site_sk->[ws_web_site_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[ws_ship_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF3 ws_order_number->[ws_order_number] -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF2 ws_order_number->[wr_order_number];RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() -------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_state = 'NC')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------filter((web_site.web_company_name = 'pri')) -------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out deleted file mode 100644 index 11217d6de3e01b..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out deleted file mode 100644 index 1f92ed1e36f204..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out deleted file mode 100644 index e62313c7a3d935..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[call_center] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out deleted file mode 100644 index c9404710bfa16f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF3 ctr_store_sk->[ctr_store_sk,s_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ctr_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 RF3 -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------filter((store.s_state = 'SD')) -----------------------PhysicalOlapScan[store] apply RFs: RF3 ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out deleted file mode 100644 index 4fdff8b37961c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = 
web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) -------------------------------PhysicalOlapScan[customer_address] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out deleted file mode 100644 index e963fc6a8caa7a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] 
-------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( 
cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out deleted file mode 100644 index b4c126ae67aebf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject 
---------------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out deleted file mode 100644 index 2cac8d809ad124..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 
'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject 
-----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 2001)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out deleted file mode 100644 index 966f8701126465..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 i_brand_id->[i_brand_id,i_brand_id,i_brand_id];RF7 i_class_id->[i_class_id,i_class_id,i_class_id];RF8 i_category_id->[i_category_id,i_category_id,i_category_id] ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------------------PhysicalProject -----------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) 
-----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF11 i_item_sk->[ss_item_sk,ss_item_sk] ---------------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[ss_item_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF11 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) 
-----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF15 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF14 ss_item_sk->[cs_item_sk,i_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[cs_item_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] apply RFs: RF14 ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject 
---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF17 ss_item_sk->[i_item_sk,ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF16 i_item_sk->[ws_item_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[item] apply RFs: RF17 ---------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff 
--git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out deleted file mode 100644 index 81b0bae51498c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF1 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) 
-----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out deleted file mode 100644 index c6e88456a7e402..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[cs_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF0 cs_order_number->[cs_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'WV')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out deleted file mode 100644 index 52da90d84ff3a8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_quarter_name = '2001Q1')) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out deleted file mode 100644 index 22f67b07a4698c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cs_bill_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 RF5 
-------------------------------------------PhysicalProject ---------------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1998)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out deleted file mode 100644 index 9d4dfee81c5cc1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 2)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out deleted file mode 100644 index 2be8c8135db7f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] ---------------PhysicalProject 
-----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 ---------------PhysicalProject -----------------filter((date_dim.d_year = 1998)) -------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out deleted file mode 100644 index fa360d9c6fdc83..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out deleted file mode 100644 index 6a3b7ecf26ca2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[inv_warehouse_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[warehouse] -------------------------PhysicalProject ---------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out deleted file mode 100644 index a96dc0686f150d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out deleted file mode 100644 index a65c74fa0b81dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF4 item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF7 ws_item_sk->[item_sk] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ws_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF8 -------------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -----------------------PhysicalProject -------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out deleted file mode 100644 index 5815f02327d472..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_zip->[ca_zip];RF6 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF4 ca_address_sk->[c_current_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF6 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------PhysicalProject -------------------filter((store.s_market_id = 8)) ---------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out deleted file mode 100644 index 10bab76c77b7f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and 
(store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out deleted file mode 100644 index edbed407b77921..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[cs_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 
'N'),(promotion.p_channel_event = 'N')]) -----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out deleted file mode 100644 index 3eec2f7437212b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out deleted file mode 100644 index 6d0d5cb82d5160..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF9 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[sr_returned_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF3 cs_bill_customer_sk->[sr_customer_sk,ss_customer_sk];RF4 cs_item_sk->[sr_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_customer_sk->[ss_customer_sk];RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 RF5 RF6 RF9 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF3 RF4 RF5 RF7 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 
1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(d_year IN (1999, 2000, 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out deleted file mode 100644 index 8beaf9b74953fb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 11)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manufact_id = 816)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out 
deleted file mode 100644 index 6671347af5cc6c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[wr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[wr_returning_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'IN')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out deleted file mode 100644 index 2a86f699341a7b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ss_addr_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------PhysicalOlapScan[customer_address] ---------------PhysicalProject -----------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------PhysicalOlapScan[date_dim] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_bill_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county,ca_county,ca_county,ca_county] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF7 ca_county->[ca_county,ca_county,ca_county] ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF6 ca_county->[ca_county,ca_county] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 RF7 RF8 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) 
-------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------PhysicalProject -------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) ---------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF7 RF8 ---------------------PhysicalProject -----------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 
i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out deleted file mode 100644 index 0416c3f39cb340..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF0 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF4 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 
ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF8 i_manufact_id->[i_manufact_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out deleted file mode 100644 index a19fe778a57647..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) ---------------------------------PhysicalOlapScan[store] 
-------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out deleted file mode 100644 index 83807f4b912bfe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF5 cd_demo_sk->[c_current_cdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) 
otherCondition=() ---------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out deleted file mode 100644 index 08593e2e439b92..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF0 
i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out deleted file mode 100644 index 2dba5f8dad05c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out deleted file mode 100644 index 2bc9d9fecbca40..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out deleted file mode 100644 index 90c507f9c536f5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[inv_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -----------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out deleted file mode 100644 index 9a590246f64a4a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject 
---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id,customer_id,customer_id,customer_id] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as 
DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF7 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF7 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out deleted file mode 100644 index aae0d788557045..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 
0.99)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units 
IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out deleted file mode 100644 index 8b26911e6afcaa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((item.i_manager_id = 1)) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out deleted file mode 100644 index c2cc91b7f43043..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -------------PhysicalProject ---------------PhysicalOlapScan[item] apply RFs: RF1 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalProject -----------------------filter((rnk < 11)) -------------------------PhysicalWindow ---------------------------PhysicalQuickSort[MERGE_SORT] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------PhysicalPartitionTopN -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) ---------------------------------------PhysicalProject -----------------------------------------hashAgg[GLOBAL] -------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------hashAgg[LOCAL] -----------------------------------------------PhysicalProject -------------------------------------------------filter((ss1.ss_store_sk = 146)) ---------------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------------PhysicalProject -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[GLOBAL] -------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------PhysicalProject -------------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) ---------------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out deleted file mode 100644 index 68d1ef7855a7fd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] 
-----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ws_bill_customer_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out deleted file mode 100644 index 4f754d410d4b6d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) 
-----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five Points', 'Liberty', 'Oak Grove')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF5 -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out deleted file mode 100644 index 29aa8e6ae22e6c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out deleted file mode 100644 index d994b40da36a4a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- 
This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF1 cd_demo_sk->[ss_cdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 
200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -------------------------PhysicalOlapScan[customer_demographics] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------filter((date_dim.d_year = 1999)) -----------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out deleted file mode 100644 index 0db3fa841189b2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF0 wr_order_number->[ws_order_number];RF1 wr_item_sk->[ws_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[web_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF3 cr_order_number->[cs_order_number];RF4 cr_item_sk->[cs_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 RF5 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[catalog_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF6 sr_ticket_number->[ss_ticket_number];RF7 sr_item_sk->[ss_item_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and 
(sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) -------------------------------------------------------------------PhysicalOlapScan[store_returns] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out deleted file mode 100644 index fde3b7f2fa20c8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF2 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_page] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out deleted file mode 100644 index e7941ce875f02f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[sr_returned_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF0 sr_ticket_number->[ss_ticket_number];RF1 sr_item_sk->[ss_item_sk];RF2 sr_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------PhysicalOlapScan[date_dim] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out deleted file mode 100644 index 15a333bc25275d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 1)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out deleted file mode 100644 index 04920e65ac6894..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out deleted file mode 100644 index 96d57c63e6f62b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.out +++ /dev/null @@ -1,72 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF5 s_county->[ca_county];RF6 s_state->[ca_state] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF7 ---------------------------------------------PhysicalProject 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[cs_bill_customer_sk,ws_bill_customer_sk] -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] -------------------------------------------------------------PhysicalUnion ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -----------------------------------------------------------------PhysicalOlapScan[item] 
---------------------------------------------------------PhysicalProject -----------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalAssertNumRows ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out deleted file mode 100644 index 20d097ea52f2e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter((item.i_manager_id = 100)) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out deleted file mode 100644 index ef5a9edba623fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) 
---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out deleted file mode 100644 index 88777bc1ff548d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject 
---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category];RF8 i_brand->[i_brand];RF9 cc_name->[cc_name];RF10 rn->[(rn - 1)] -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out deleted file mode 100644 index 8664d84096bc44..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject 
---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date = '2001-03-24')) -----------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out deleted file mode 100644 index ed22b4adefa2b4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF4 s_store_id1->[s_store_id];RF5 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF5 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out deleted file mode 100644 index 5f53b66408a500..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF5 i_category->[i_category] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF4 d_month_seq->[d_month_seq] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 
i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[customer_address] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF5 ---------------------------PhysicalAssertNumRows -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out deleted file mode 100644 index 403d74c71ecae1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject 
---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject 
---------------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_gmt_offset = 
-7.00)) -----------------------------------------PhysicalOlapScan[customer_address] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out deleted file mode 100644 index 62da8c9cb21a0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 RF10 ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------------------PhysicalProject 
-------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF0 c_customer_sk->[ss_customer_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------------PhysicalProject ---------------------------------filter((store.s_gmt_offset = -7.00)) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out deleted file mode 100644 index 1f0de64db91bb5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ws_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF1 sm_ship_mode_sk->[ws_ship_mode_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_site] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out deleted file mode 
100644 index d4fb4990da98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN 
('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out deleted file mode 100644 index b7cf8115b6db75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF19 i_item_sk->[cr_item_sk,cs_item_sk,sr_item_sk,ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF18 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF17 ib_income_band_sk->[hd_income_band_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF16 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF15 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF14 hd_demo_sk->[c_current_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF12 
p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF11 cd_demo_sk->[c_current_cdemo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF10 cd_demo_sk->[ss_cdemo_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[c_first_shipto_date_sk] -----------------------------------------------------PhysicalProject -------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[c_first_sales_date_sk] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF7 s_store_sk->[ss_store_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() 
build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF4 cs_item_sk->[sr_item_sk,ss_item_sk] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_item_sk->[ss_item_sk];RF3 sr_ticket_number->[ss_ticket_number] -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF5 RF6 RF7 RF10 RF12 RF13 RF15 RF19 -----------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF19 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------------------------PhysicalProject -------------------------------------------------------------------------------------hashJoin[INNER_JOIN colocated] 
hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF19 ---------------------------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF19 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF8 RF9 RF11 RF14 RF16 -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter(d_year IN (2001, 2002)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[store] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------PhysicalProject -------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[customer_demographics] 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF17 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF18 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[income_band] -----------------PhysicalProject -------------------PhysicalOlapScan[income_band] -------------PhysicalProject ---------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -----------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out deleted file mode 100644 index 
4dd67a91e1e98e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF4 ss_store_sk->[s_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] apply RFs: RF4 -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out deleted file mode 100644 index 7ef36371976ad6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF3 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[ws_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) 
-------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF7 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] 
---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------PhysicalOlapScan[ship_mode] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out deleted file mode 100644 index e4a703c25ac818..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = 
store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out deleted file mode 100644 index 82e85b4fc4697a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) 
-----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF5 -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out deleted file mode 100644 index af6d7e8c85a5f6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -----------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out deleted file mode 100644 index 2b6615e0b93b84..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 
'N')]) -----------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out deleted file mode 100644 index 866c026a90dd67..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((store.s_state = tmp1.s_state)) 
otherCondition=() build RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) ---------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out deleted file mode 100644 index 7ae1c5b71ddaf1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) ---------------------------------PhysicalOlapScan[date_dim] 
-------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out deleted file mode 100644 index 27ea6a31a49020..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk) and (d1.d_week_seq = d2.d_week_seq)) otherCondition=((d3.d_date > cast((cast(d_date as BIGINT) + 5) as DATEV2))) build RFs:RF7 d_week_seq->[d_week_seq];RF8 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[inv_date_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk,inv_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF0 cs_item_sk->[inv_item_sk] -----------------------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 RF4 -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF5 RF6 RF8 
-------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[warehouse] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_buy_potential = '501-1000')) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((d1.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out deleted file mode 100644 index a7655a2c3f268a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count > 0) and 
(if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 'Unknown')) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out deleted file mode 100644 index c32a9187e34e92..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject 
---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out deleted file mode 100644 index 7a6c63c2385f24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.out +++ /dev/null @@ -1,78 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Home')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter(d_year IN (1998, 1999)) -------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF6 i_brand_id->[i_brand_id];RF7 i_class_id->[i_class_id];RF8 i_category_id->[i_category_id];RF9 i_manufact_id->[i_manufact_id] 
---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 RF8 RF9 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out deleted file mode 100644 index 8f739a1d12b35a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = 
item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out deleted file mode 100644 index cdecac9706c07d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out deleted file mode 100644 index 1a19308d991441..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) 
-------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) -----------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out deleted file mode 100644 index 5047f4a7878cc1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 
---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', 
'21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', 
'68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', 
'25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', 
'71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out deleted file mode 100644 index e980bad3f49872..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF3 p_promo_sk->[ss_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply 
RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF4 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_returns] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[catalog_page] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF11 p_promo_sk->[ws_promo_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF9 web_site_sk->[ws_web_site_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_returns] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_site] -------------------------------------PhysicalProject ---------------------------------------filter((item.i_current_price > 50.00)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject 
-----------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------PhysicalOlapScan[promotion] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out deleted file mode 100644 index 8006799a3cc5fd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cr_returned_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cr_returning_addr_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2002)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 
ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ctr_customer_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------PhysicalProject ---------------------filter((customer_address.ca_state = 'CA')) -----------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out deleted file mode 100644 index 7c415fed511e6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out deleted file mode 100644 index eb9901a16a7b86..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.out +++ /dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[sr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[sr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cr_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] apply RFs: RF13 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) 
otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[wr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[wr_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 ---------------------------------PhysicalProject 
-----------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out deleted file mode 100644 index b6aab0b1439f62..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF3 ib_income_band_sk->[hd_income_band_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[household_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) ---------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out deleted file mode 100644 index 62c4b147f59a22..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF9 r_reason_sk->[wr_reason_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF7 ca_address_sk->[wr_refunded_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[wr_returning_cdemo_sk];RF5 cd_marital_status->[cd_marital_status];RF6 cd_education_status->[cd_education_status] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr 
Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF0 ws_item_sk->[wr_item_sk];RF1 ws_order_number->[wr_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 RF3 RF4 RF7 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF8 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_page] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and 
cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -----------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 ---------------------------------PhysicalProject -----------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out deleted file mode 100644 index 13c2b5c88bc677..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out deleted file mode 100644 index a71098a021052f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out deleted file mode 100644 index ae9b03a84ef7de..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF22 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF21 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) 
-----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF19 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF18 hd_demo_sk->[ss_hdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------------PhysicalOlapScan[household_demographics] -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) 
-------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF16 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF15 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF13 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF12 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF10 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF7 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF6 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ss_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and 
OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -------------------------PhysicalOlapScan[time_dim] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject 
---------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out deleted file mode 100644 index e4d2ae3435f174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out deleted file mode 100644 index 1f880a462795bc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF4 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF3 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ws_sold_time_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ws_ship_hdemo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_dep_count = 2)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out deleted file mode 100644 index a2e5d4a7660114..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF5 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[cr_returning_customer_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF0 cc_call_center_sk->[cr_call_center_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: 
RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[call_center] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((hd_buy_potential like '1001-5000%')) -------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out deleted file mode 100644 index 77175ca96ff6bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF2 r_reason_sk->[sr_reason_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF0 sr_item_sk->[ss_item_sk];RF1 sr_ticket_number->[ss_ticket_number] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF2 -------------------PhysicalProject ---------------------filter((reason.r_reason_desc = 'duplicate purchase')) -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out deleted file mode 100644 index 75a20cb4e35006..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------hashJoin[LEFT_ANTI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------filter((customer_address.ca_state = 'OK')) ---------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((web_site.web_company_name = 'pri')) -----------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out deleted file mode 100644 index aaaba243279489..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number];RF1 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF14 RF15 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF14 RF15 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF12 web_site_sk->[ws_web_site_sk];RF13 web_site_sk->[ws_web_site_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF10 ca_address_sk->[ws_ship_addr_sk];RF11 ca_address_sk->[ws_ship_addr_sk] 
-------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_ship_date_sk];RF9 d_date_sk->[ws_ship_date_sk] -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[ws_order_number];RF7 ws_order_number->[ws_order_number] -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 RF7 -------------------------------hashJoin[RIGHT_SEMI_JOIN bucketShuffle] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF4 ws_order_number->[wr_order_number];RF5 ws_order_number->[wr_order_number];RF14 ws_order_number->[ws_order_number,ws_order_number];RF15 ws_order_number->[ws_order_number,ws_order_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF2 wr_order_number->[ws_order_number];RF3 wr_order_number->[ws_order_number] -------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 RF11 RF12 RF13 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((customer_address.ca_state = 'NC')) -----------------------------PhysicalOlapScan[customer_address] 
---------------------PhysicalProject -----------------------filter((web_site.web_company_name = 'pri')) -------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out deleted file mode 100644 index 11217d6de3e01b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF1 t_time_sk->[ss_sold_time_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) -----------------------PhysicalOlapScan[time_dim] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out deleted file mode 100644 index 1f92ed1e36f204..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out deleted file mode 100644 index addda24e68b119..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF1 sm_ship_mode_sk->[cs_ship_mode_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF0 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[warehouse] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[ship_mode] -----------------------PhysicalProject -------------------------PhysicalOlapScan[call_center] -------------------PhysicalProject ---------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out deleted file mode 100644 index 9ee4e3a3195cb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject 
-----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((store.s_state = 'SD')) ---------------------------PhysicalOlapScan[store] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out deleted file mode 100644 index 4dfc2de4cf3fe5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) 
otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out deleted file mode 100644 index e7ae73f8e00980..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), 
(cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out deleted file mode 100644 index be61da2020ee40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out deleted file mode 100644 index 55cadf71bf5d45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF3 ss_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) ---------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF0 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out deleted file mode 100644 index 2a29746e37ef07..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query14.out +++ 
/dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] 
-------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 -----------------------------------------------PhysicalProject 
-------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 -------------------------------------------------PhysicalProject 
---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy 
= 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out deleted file mode 100644 index c070b7d34c00d4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -----------------------PhysicalProject 
-------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out deleted file mode 100644 index 1733e793f1f9b0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF3 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'WV')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) ---------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out deleted file mode 100644 index 5342955a97aae2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out deleted file mode 100644 index 57183675eb5fc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[i_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 
c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out deleted file mode 100644 index 722f7cfee673fc..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 2)) 
-----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out deleted file mode 100644 index 5888221893d260..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] 
-------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out deleted file mode 100644 index 16785cbee81da3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out deleted file mode 100644 index 991b448adf9f0c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out deleted file mode 100644 index 7f10ebd7894ce7..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out deleted file mode 100644 index 6103f4eb74d6ab..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((web_sales.ws_item_sk = 
frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out deleted file mode 100644 index 3d2e80e2381a00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 8)) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------------PhysicalProject 
-------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out deleted file mode 100644 index cbc48e3165ade9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out deleted file mode 100644 index 37ede3b355320f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out deleted file mode 100644 index 9b311ff91423bf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF3 -----------------------------------PhysicalProject -------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender 
= 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 
17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 
13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out deleted file mode 100644 index 4e04f8042e2e31..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] 
---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------filter(d_year IN (1999, 2000, 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out deleted file mode 100644 index 99f54520a25e12..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply 
RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'IN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out deleted file mode 100644 index eb49a9cb10b936..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 ---------------------PhysicalProject -----------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink -------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), 
(cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out deleted file mode 100644 index 3cc7c048f5784a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ws_item_sk->[i_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF11 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] 
---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out deleted file mode 100644 index 79c46ccb77bbb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out deleted file mode 100644 index 1e865046f6cf27..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF5 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out deleted file mode 100644 index 78618ea60dfcc0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((d1.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out deleted file mode 100644 index cc63716e4ba212..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) 
---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out deleted file mode 100644 index 26d52e9fae9c40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out deleted file mode 100644 index 899b1a5e0bdd99..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out deleted file mode 100644 index 980ceef87cedc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] 
-----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total 
> 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] -----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out deleted file mode 100644 index ade38048fb9732..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject 
---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color 
IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out deleted file mode 100644 index 2ca590a9d0d4d1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out deleted file mode 100644 index 86d157354860a4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out deleted file mode 100644 index 40b25ae51ad929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF3 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) 
---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out deleted file mode 100644 index 75c9af2b354fae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (ca_city = bought_city))) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_city IN ('Centerville', 'Fairview', 
'Five Points', 'Liberty', 'Oak Grove')) -----------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------PhysicalOlapScan[customer] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out deleted file mode 100644 index 048f93392595ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and 
(v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out deleted file mode 100644 index ad78a4f21c50ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 
'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 1999)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out deleted file mode 100644 index 7d0f6b1ce22d0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out deleted file mode 100644 index 32ac590f71d004..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() 
build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out deleted file mode 100644 index 2f0a1b10cbff1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalWindow 
-------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out deleted file mode 100644 index 1eff8fc3ba89c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out deleted file mode 100644 index 89dc632eb527c4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu 
packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out deleted file mode 100644 index ca44d791dc42aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 
c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply 
RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] 
-----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out deleted file mode 100644 index e24470e9606c8b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 100)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out deleted file mode 100644 index 97c4f27b14edb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 
i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 cs_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out deleted file mode 100644 index 4f23fac89cf958..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out deleted file mode 100644 index 8b1c60c8c19a1b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out deleted file mode 100644 index dacb8971d1507e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) 
otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out deleted file mode 100644 index ddb58c3887ed6f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 
i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 ---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out deleted file mode 100644 index f3678f64aece45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Children')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] 
hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build 
RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out deleted file mode 100644 index faf30604b86926..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF8 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF8 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF6 ss_promo_sk->[p_promo_sk] -----------------------------------PhysicalProject -------------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) ---------------------------------------PhysicalOlapScan[promotion] apply RFs: RF6 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] 
---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------PhysicalProject 
-------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out deleted file mode 100644 index c23bff2b443621..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out deleted file mode 100644 index 9653f6c52199aa..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out deleted file mode 100644 index 6aa36d179b1db1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF17 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF10 ss_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() -------------------------------------------PhysicalProject 
---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF3 cs_item_sk->[ss_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF8 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter(d_year IN (2001, 2002)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sale > (2 * refund))) ---------------------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF8 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[income_band] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -------------------------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out deleted file mode 100644 index b3f3cfc9bbf481..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() 
build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out deleted file mode 100644 index 6017253a3383c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out deleted file mode 100644 index d043d5f25b6c7a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out 
deleted file mode 100644 index aa07d1b2a42d9b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out deleted file mode 100644 index a68ff0c1138094..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out deleted file mode 100644 index 18425373b08452..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject 
---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out deleted file mode 100644 index ae5b26647980e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() build RFs:RF2 
s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out deleted file mode 100644 index 3010f0b574e03b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out deleted file mode 100644 index 0964cf6998d4ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF6 d_date_sk->[inv_date_sk];RF7 cs_item_sk->[inv_item_sk] 
-------------------------------PhysicalOlapScan[inventory] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=((d3.d_date > days_add(d_date, INTERVAL 5 DAY))) -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[cs_bill_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((household_demographics.hd_buy_potential = '501-1000')) 
-----------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((d1.d_year = 2002)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_returns] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out deleted file mode 100644 index bfc42f79bbc570..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 
'Unknown')) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out deleted file mode 100644 index c3687dadd21872..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 
d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and 
(t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out deleted file mode 100644 index 921d754e533285..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 
---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = 
web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( 
cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out deleted file mode 100644 index 668c3625c56841..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 ss_sold_date_sk->[d_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] apply RFs: RF3 -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 ss_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() 
build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 cs_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out deleted file mode 100644 index 3f4330d7466b08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject 
---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out deleted file mode 100644 index f57418546e7fb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out deleted file mode 100644 index fd4ec64bb81546..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) 
-------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', 
'24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', 
'71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', 
'27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', 
'77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out deleted file mode 100644 index 5afd260e3e1817..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 
d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF9 RF10 -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF8 p_promo_sk->[cs_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN 
colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF14 p_promo_sk->[ws_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] 
-----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out deleted file mode 100644 index fdc3edc4efb31f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) 
otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'CA')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out deleted file mode 100644 index 5142d25b09e8a3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out deleted file mode 100644 index c6d7033d8cecea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query83.out +++ 
/dev/null @@ -1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 cr_item_sk->[i_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
-------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 wr_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out deleted file mode 100644 index 82e9098dbdcbb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out deleted file mode 100644 index 1fed378a9afc1d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF5 wr_returning_cdemo_sk->[cd_demo_sk];RF6 cd_marital_status->[cd_marital_status];RF7 cd_education_status->[cd_education_status] -----------------------------PhysicalProject -------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF4 
wr_refunded_cdemo_sk->[cd_demo_sk] ---------------------------------PhysicalProject -----------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[wr_refunded_addr_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 
300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 2000)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) -----------------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[reason] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out deleted file mode 100644 index c68c8b30e9e929..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out deleted file mode 100644 index 9bfa302e37c8cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) 
-------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out deleted file mode 100644 index a619efe9def125..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 
4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out deleted file mode 100644 index 2e9294bce91fd2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out deleted file mode 100644 index 13607b4ae13f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out deleted file mode 100644 index 6593bb83f07994..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like '1001-5000%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out deleted file mode 100644 index 45f02ddf38ee38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'duplicate purchase')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out deleted file mode 100644 index 6a25137bf51fcf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF3 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'OK')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= 
'2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((web_site.web_company_name = 'pri')) ---------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out deleted file mode 100644 index ad87360cc8a6ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 
-------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'NC')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out deleted file mode 100644 index c50ffa373c8150..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out deleted file mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out deleted file mode 100644 index d1a4251b785e74..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out b/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out deleted file mode 100644 index 6dbec861eaffd3..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/rf_prune/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out 
deleted file mode 100644 index 4569832b6d5aa6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF2 ctr_store_sk->[ctr_store_sk,s_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject 
-------------------------filter((store.s_state = 'SD')) ---------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out deleted file mode 100644 index 4dfc2de4cf3fe5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = 
web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 4) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] 
-----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Cochran County', 'Kandiyohi County', 'Marquette County', 'Storey County', 'Warren County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out deleted file mode 100644 index 8abb7de87e97f9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2002) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 2002) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 2001) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 
0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 2001) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out deleted file mode 100644 index be61da2020ee40..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1998-05-06') and (date_dim.d_date >= '1998-04-06')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject 
-----------------------------filter(i_category IN ('Books', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out deleted file mode 100644 index dc2239ca3f9701..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('M', 'S'),cd_education_status IN ('4 yr Degree', 'College'),OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF3 ss_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 
'College')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '4 yr Degree')]] and cd_education_status IN ('4 yr Degree', 'College', 'Unknown') and cd_marital_status IN ('D', 'M', 'S')) ---------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('KS', 'MI', 'SD'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('CO', 'MO', 'ND'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('NH', 'OH', 'TX'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF0 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'KS', 'MI', 'MO', 'ND', 'NH', 'OH', 'SD', 'TX')) ---------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------filter((date_dim.d_year = 2001)) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(hd_dep_count IN (1, 3)) -------------------------PhysicalOlapScan[household_demographics] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out deleted file mode 100644 index 196a98b5a2f51d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query14.out +++ /dev/null @@ -1,154 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2002) and (d1.d_year >= 2000)) 
-----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2002) and (d2.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2002) and (d3.d_year >= 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( 
cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2002) and (date_dim.d_year >= 2000)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[ss_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF11 ss_item_sk->[ss_item_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] 
hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF15 ss_item_sk->[cs_item_sk,i_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF14 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] apply RFs: RF15 -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF18 ss_item_sk->[i_item_sk,ws_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF17 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[item] apply RFs: RF18 -----------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out deleted file mode 100644 index b93c82158d296a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF2 c_customer_sk->[cs_bill_customer_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out deleted file mode 100644 index 1733e793f1f9b0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF3 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'WV')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(cc_county IN ('Barrow County', 'Daviess County', 'Luce County', 'Richland County', 'Ziebach County')) ---------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out deleted file mode 100644 index 7cc4a196c206c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out deleted file mode 100644 index 57183675eb5fc2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[i_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 
cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Advanced Degree') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 2, 4, 7, 8)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('GA', 'IN', 'ME', 'NC', 'OK', 'WA', 'WY')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out deleted file mode 100644 index c0852398e70d38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query19.out +++ /dev/null @@ -1,35 
+0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF4 
-------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 2)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out deleted file mode 100644 index 988b288ebb81d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 53))) 
otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out deleted file mode 100644 index 16785cbee81da3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-02-25') and (date_dim.d_date >= '2002-01-26')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Shoes', 'Women')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out deleted file mode 100644 index e80000c6353128..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-03-29') and (date_dim.d_date >= '2002-01-28')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out deleted file mode 100644 index a96dc0686f150d..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_month_seq <= 1199) and (date_dim.d_month_seq >= 1188)) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out deleted file mode 100644 index 0d8b21c19639e9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN 
shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out deleted file mode 100644 index 0dc2c851744de2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ca_zip->[s_zip];RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF3 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 8)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'beige')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out deleted file mode 100644 index e4e15d711aeb70..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 
sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 2000)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out deleted file mode 100644 index 0c3a3f432aeb48..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'S')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out deleted file mode 100644 index c6137e774ae1b9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'D')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter(s_state IN ('AL', 'LA', 'MI', 'MO', 'SC', 'TN')) ---------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out deleted file mode 100644 index 7a6bdd8868ef00..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 131.00),(store_sales.ss_list_price <= 141.00)],AND[(store_sales.ss_coupon_amt >= 16798.00),(store_sales.ss_coupon_amt <= 17798.00)],AND[(store_sales.ss_wholesale_cost >= 25.00),(store_sales.ss_wholesale_cost <= 45.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 145.00),(store_sales.ss_list_price <= 155.00)],AND[(store_sales.ss_coupon_amt >= 14792.00),(store_sales.ss_coupon_amt <= 15792.00)],AND[(store_sales.ss_wholesale_cost >= 46.00),(store_sales.ss_wholesale_cost <= 66.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 150.00),(store_sales.ss_list_price <= 160.00)],AND[(store_sales.ss_coupon_amt >= 6600.00),(store_sales.ss_coupon_amt <= 7600.00)],AND[(store_sales.ss_wholesale_cost >= 9.00),(store_sales.ss_wholesale_cost <= 29.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 91.00),(store_sales.ss_list_price <= 101.00)],AND[(store_sales.ss_coupon_amt >= 13493.00),(store_sales.ss_coupon_amt <= 14493.00)],AND[(store_sales.ss_wholesale_cost >= 36.00),(store_sales.ss_wholesale_cost <= 56.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 0.00),(store_sales.ss_list_price <= 10.00)],AND[(store_sales.ss_coupon_amt >= 7629.00),(store_sales.ss_coupon_amt <= 8629.00)],AND[(store_sales.ss_wholesale_cost >= 6.00),(store_sales.ss_wholesale_cost <= 26.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 15257.00),(store_sales.ss_coupon_amt <= 16257.00)],AND[(store_sales.ss_wholesale_cost >= 31.00),(store_sales.ss_wholesale_cost <= 51.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out deleted file mode 100644 index 0ad4d176c10d08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------filter(d_year IN (1999, 2000, 2001)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out deleted file mode 100644 index 
1fd2b5a1688c12..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[wr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'IN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out deleted file mode 100644 index c768d674841b33..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------filter((ss.d_year = 2000) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ws_bill_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------filter((ws.d_year = 2000) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 2000)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 
-----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 2000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 2000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 2000)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out deleted file mode 100644 index 7992f57d1c87b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 29)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '1999-04-07') and (date_dim.d_date >= '1999-01-07')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out deleted file mode 100644 index 3cc7c048f5784a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 
ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ws_item_sk->[i_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF10 RF11 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] 
-----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 2002)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------filter((item.i_category = 'Home')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out deleted file mode 100644 index 79c46ccb77bbb8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[ss_hdemo_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) -------------------------------------PhysicalOlapScan[household_demographics] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and 
OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (1998, 1999, 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Franklin Parish', 'Luce County', 'Richland County', 'Walker County', 'Williamson County', 'Ziebach County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out deleted file mode 100644 index dc926eb4b522f0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------hashJoin[LEFT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF5 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[c_current_cdemo_sk] -------------------------------PhysicalProject 
---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] 
apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out deleted file mode 100644 index 92f5563f1a38b5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: 
RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((d1.d_year = 2002)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter(s_state IN ('AL', 'GA', 'MI', 'MO', 'OH', 'SC', 'SD', 'TN')) -------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out deleted file mode 100644 index cc63716e4ba212..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject 
-----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 75.00) and (item.i_current_price >= 45.00) and i_manufact_id IN (1000, 707, 747, 856)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-04-22') and (date_dim.d_date >= '1999-02-21')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out deleted file mode 100644 index ef2051eb2d000c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1194) and (date_dim.d_month_seq >= 1183)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out deleted file mode 100644 index b7ca740e55c672..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out deleted file mode 100644 index 709da33d851bff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] 
-----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out deleted file mode 100644 index 5ff27658e2ed3f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-05-02') and (date_dim.d_date >= '2001-03-03')) -------------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out deleted file mode 100644 index 3034a77fe0897a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 788) and (i1.i_manufact_id >= 748)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('aquamarine', 'blue', 'chartreuse', 'chiffon', 'dodger', 'gainsboro', 'tan', 'violet'),i_units IN ('Bunch', 'Dozen', 'Each', 'Ounce', 'Oz', 'Pound', 'Ton', 'Tsp'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'gainsboro'),i_units IN ('Dozen', 'Ounce'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('chiffon', 'violet'),i_units IN ('Pound', 'Ton'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('blue', 'chartreuse'),i_units IN ('Each', 'Oz'),i_size IN ('N/A', 
'large')],AND[(item.i_category = 'Men'),i_color IN ('dodger', 'tan'),i_units IN ('Bunch', 'Tsp'),i_size IN ('economy', 'medium')]]],AND[i_color IN ('almond', 'blanched', 'indian', 'lime', 'peru', 'saddle', 'spring', 'tomato'),i_units IN ('Box', 'Carton', 'Case', 'Dram', 'Gram', 'Pallet', 'Tbl', 'Unknown'),OR[AND[(item.i_category = 'Women'),i_color IN ('blanched', 'tomato'),i_units IN ('Case', 'Tbl'),i_size IN ('economy', 'medium')],AND[(item.i_category = 'Women'),i_color IN ('almond', 'lime'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'small')],AND[(item.i_category = 'Men'),i_color IN ('peru', 'saddle'),i_units IN ('Gram', 'Pallet'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('indian', 'spring'),i_units IN ('Carton', 'Unknown'),i_size IN ('economy', 'medium')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'medium', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out deleted file mode 100644 index 2ca590a9d0d4d1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 2002)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out deleted file mode 100644 index 86d157354860a4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 146)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 146) and ss_addr_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out deleted file mode 100644 index 6bbc52ecb1a343..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out deleted file mode 100644 index c1ebbb551053cb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[ss_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (1999, 2000, 2001)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 6),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] 
-------------------------PhysicalProject ---------------------------filter(s_city IN ('Centerville', 'Fairview', 'Five Points', 'Liberty', 'Oak Grove')) -----------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out deleted file mode 100644 index ecc4777c22f664..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out deleted file mode 100644 index e7bd6986059878..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically 
generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'MD', 'MN'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('IL', 'TX', 'VA'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('IN', 'MI', 'WI'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF2 ca_address_sk->[ss_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 200.00) and 
(store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'U'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'U', 'W')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 1999)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'IL', 'IN', 'MD', 'MI', 'MN', 'TX', 'VA', 'WI')) ---------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out deleted file mode 100644 index 7d0f6b1ce22d0f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1999)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out deleted file mode 100644 index 917f29d09727cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF3 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out deleted file mode 100644 index f5c3f38463d42c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out deleted file mode 100644 index 470fabc0f31e81..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1227) and (date_dim.d_month_seq >= 1216)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out deleted file mode 100644 index 1eff8fc3ba89c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out deleted file mode 100644 index 04920e65ac6894..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out deleted file mode 100644 index c2d65d63990fae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Women') and (item.i_class = 'maternity')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 5) and (date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out deleted file mode 100644 index e24470e9606c8b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 100)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) 
-------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out deleted file mode 100644 index 97c4f27b14edb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 cs_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 -------------------------------------PhysicalProject 
---------------------------------------filter((date_dim.d_moy = 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy 
= 2) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('cyan', 'green', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out deleted file mode 100644 index 2cab4f33e1358d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 1999),AND[(date_dim.d_year = 1998),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2000),(date_dim.d_moy = 1)]] and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[call_center] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category,i_category];RF8 i_brand->[i_brand,i_brand];RF9 cc_name->[cc_name,cc_name];RF10 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 
---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 1999)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out deleted file mode 100644 index 62cf69cc400980..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] 
hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cs_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-03-24')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as 
DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-03-24')) -------------------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out deleted file mode 100644 index 494750bddc66ee..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject 
-------------------------filter((d.d_month_seq <= 1207) and (d.d_month_seq >= 1196)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1219) and (d.d_month_seq >= 1208)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out deleted file mode 100644 index f3678f64aece45..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Children')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] 
-----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 8) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Children')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out deleted file mode 100644 index faf30604b86926..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF9 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF8 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF8 ---------------------------PhysicalProject 
-----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF7 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF6 ss_promo_sk->[p_promo_sk] -----------------------------------PhysicalProject -------------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) ---------------------------------------PhysicalOlapScan[promotion] apply RFs: RF6 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 ---------------------------------------PhysicalProject -----------------------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 ss_item_sk->[i_item_sk] -------------------PhysicalProject ---------------------filter((item.i_category = 'Jewelry')) -----------------------PhysicalOlapScan[item] apply RFs: RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------filter((customer_address.ca_gmt_offset = -7.00)) ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 ss_sold_date_sk->[d_date_sk] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1999)) -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter((store.s_gmt_offset = -7.00)) ---------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out deleted file mode 100644 index 928dd1dc5d5a9a..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[ws_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[ws_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out deleted file mode 100644 index d4fb4990da98b8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject 
-----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out deleted file mode 100644 index 2ef8d6701ee4c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_shipto_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF18 d_date_sk->[c_first_sales_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF17 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF16 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF15 cd_demo_sk->[c_current_cdemo_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF14 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF14 RF15 RF16 RF17 RF18 RF19 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF13 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF13 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF11 ss_item_sk->[sr_item_sk];RF12 ss_ticket_number->[sr_ticket_number] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_returns] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF10 ss_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer_address] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF9 ss_cdemo_sk->[cd_demo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF9 -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) 
otherCondition=() build RFs:RF8 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF6 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF5 ib_income_band_sk->[hd_income_band_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF3 cs_item_sk->[ss_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 RF4 RF6 RF7 RF8 ---------------------------------------------------------------PhysicalProject 
-----------------------------------------------------------------filter(d_year IN (2001, 2002)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sale > (2 * refund))) ---------------------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF0 cr_item_sk->[cs_item_sk];RF1 cr_order_number->[cs_order_number] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF8 -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF8 -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF5 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[income_band] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] 
-------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 33.00) and (item.i_current_price >= 24.00) and i_color IN ('blanched', 'brown', 'burlywood', 'chocolate', 'drab', 'medium')) -------------------------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 2001)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2002)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out deleted file mode 100644 index 1664203e1172f5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk,ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF4 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1232) and (date_dim.d_month_seq >= 1221)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[item] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out deleted file mode 100644 index a4ff984e4cdc43..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF3 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject 
-----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF7 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('GREAT EASTERN', 'LATVIAN')) 
-----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 77621) and (time_dim.t_time >= 48821)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out deleted file mode 100644 index 09aed607328af2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 
i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1217) and (date_dim.d_month_seq >= 1206)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out deleted file mode 100644 index aa07d1b2a42d9b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Five Points', 'Pleasant Hill')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = -1)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out deleted file mode 100644 index a68ff0c1138094..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------filter(ca_state IN ('MI', 'TX', 'VA')) -----------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out deleted file mode 100644 index 49acfd90e56654..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out deleted file mode 100644 index b1074dc6ffeed5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1224) and (d1.d_month_seq >= 1213)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() build 
RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1224) and (date_dim.d_month_seq >= 1213)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out deleted file mode 100644 index 3010f0b574e03b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 1998)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out deleted file mode 100644 index e0d8acf21bdec5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF8 w_warehouse_sk->[inv_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF6 d_date_sk->[inv_date_sk];RF7 
cs_item_sk->[inv_item_sk] -------------------------------PhysicalOlapScan[inventory] apply RFs: RF6 RF7 RF8 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF5 d_week_seq->[d_week_seq] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[cs_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk)) otherCondition=((d3.d_date > days_add(d_date, INTERVAL 5 DAY))) build RFs:RF3 d_date_sk->[cs_ship_date_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF0 hd_demo_sk->[cs_bill_hdemo_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF4 -------------------------------------------------------PhysicalProject 
---------------------------------------------------------filter((household_demographics.hd_buy_potential = '501-1000')) -----------------------------------------------------------PhysicalOlapScan[household_demographics] ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((d1.d_year = 2002)) -------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -----------------------------------------------PhysicalProject -------------------------------------------------filter((customer_demographics.cd_marital_status = 'W')) ---------------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_returns] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out deleted file mode 100644 index bfc42f79bbc570..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('501-1000', 
'Unknown')) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter(s_county IN ('Barrow County', 'Daviess County', 'Fairfield County', 'Walker County')) -----------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out deleted file mode 100644 index 421b74396da876..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.0), (year_total / year_total), NULL) > if((year_total > 0.0), (year_total / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.0)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject 
---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.0)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out deleted file mode 100644 index 921d754e533285..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject 
-----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Home')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (1998, 1999)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 
i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 1999)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 1998)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out deleted file mode 100644 index 668c3625c56841..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 ss_sold_date_sk->[d_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] apply RFs: RF3 -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 ss_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter(ss_hdemo_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject 
---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_bill_addr_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 cs_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF2 -----------------------------PhysicalProject -------------------------------filter(cs_warehouse_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out deleted file mode 100644 index cdecac9706c07d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '1998-09-04') and (date_dim.d_date >= '1998-08-05')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out deleted file mode 100644 index a6034ca86ac5c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out deleted file mode 100644 index 974fb1da39b734..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffle] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (1998, 1999, 2000)) -------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 5),(household_demographics.hd_vehicle_count > 4)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 
2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', 
'24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', 
'71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', 
'27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', 
'75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out deleted file mode 100644 index b64c3639ecbb6d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF11 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply 
RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF8 p_promo_sk->[cs_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] 
-----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF15 web_site_sk->[ws_web_site_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF14 p_promo_sk->[ws_promo_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF13 i_item_sk->[ws_item_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '1998-09-27') and (date_dim.d_date >= '1998-08-28')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject 
-----------------------------------------------filter((item.i_current_price > 50.00)) -------------------------------------------------PhysicalOlapScan[item] -----------------------------------------PhysicalProject -------------------------------------------filter((promotion.p_channel_tv = 'N')) ---------------------------------------------PhysicalOlapScan[promotion] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out deleted file mode 100644 index e6aef6266d392b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[cr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2002)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'CA')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out deleted file mode 100644 index 5142d25b09e8a3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 47.00) and (item.i_current_price >= 17.00) and i_manufact_id IN (138, 169, 339, 639)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '1999-09-07') and (date_dim.d_date >= '1999-07-09')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out deleted file mode 100644 index c6d7033d8cecea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query83.out +++ /dev/null @@ 
-1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 cr_item_sk->[i_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[cr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 
-------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) -----------------------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 wr_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF3 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------filter(d_date IN ('2001-06-06', '2001-09-02', '2001-11-11')) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out deleted file mode 100644 index 82e9098dbdcbb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Oakwood')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 55806) and (income_band.ib_lower_bound >= 5806)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out deleted file mode 100644 index be579258fa046a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF8 r_reason_sk->[wr_reason_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF5 wr_returning_cdemo_sk->[cd_demo_sk];RF6 cd_marital_status->[cd_marital_status];RF7 cd_education_status->[cd_education_status] -----------------------------PhysicalProject -------------------------------filter(cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF5 RF6 RF7 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 
'W'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF4 wr_refunded_cdemo_sk->[cd_demo_sk] ---------------------------------PhysicalProject -----------------------------------filter(OR[AND[(cd1.cd_marital_status = 'M'),(cd1.cd_education_status = '4 yr Degree')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'Secondary')],AND[(cd1.cd_marital_status = 'W'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('4 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('M', 'S', 'W')) -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('DE', 'FL', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('ID', 'IN', 'ND'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('IL', 'MT', 'OH'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF3 ca_address_sk->[wr_refunded_addr_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 RF8 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] 
---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF9 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 2000)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('DE', 'FL', 'ID', 'IL', 'IN', 'MT', 'ND', 'OH', 'TX')) -----------------------------------------PhysicalOlapScan[customer_address] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[reason] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out deleted file mode 100644 index ebfed4e4aa6c91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1235) and (d1.d_month_seq >= 1224)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out deleted file mode 100644 index e16cd1ad45b6dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1195) and (date_dim.d_month_seq >= 1184)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out deleted file mode 100644 index a619efe9def125..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 6) and OR[AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 
4),AND[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count <= 5)]] and hd_dep_count IN (-1, 3, 4)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out deleted file mode 100644 index e4d2ae3435f174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Electronics', 'Jewelry', 'Shoes'),i_class IN ('athletic', 'portable', 'semi-precious')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'maternity', 'rock')]] and i_category IN ('Electronics', 'Jewelry', 'Men', 'Music', 'Shoes', 'Women') and i_class IN ('accessories', 'athletic', 'maternity', 'portable', 'rock', 'semi-precious')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 1999)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out deleted file mode 100644 index 13607b4ae13f5d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 11) and (time_dim.t_hour >= 10)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 17) and (time_dim.t_hour >= 16)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 2)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out deleted file mode 100644 index 6af6c014d0c298..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -6.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like '1001-5000%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out deleted file mode 100644 index b31b235e379e59..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 320)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2002-05-27') and (date_dim.d_date >= '2002-02-26')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out deleted file mode 100644 index 45f02ddf38ee38..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'duplicate purchase')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out deleted file mode 100644 index 6a25137bf51fcf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF3 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] -------------------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((customer_address.ca_state = 'OK')) -----------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= 
'2000-04-01') and (date_dim.d_date >= '2000-02-01')) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter((web_site.web_company_name = 'pri')) ---------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out deleted file mode 100644 index 269d330c090c56..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number];RF1 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF14 RF15 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF14 RF15 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF12 ws_order_number->[wr_order_number,ws_order_number];RF13 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF10 wr_order_number->[ws_order_number];RF11 wr_order_number->[ws_order_number] 
-----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF10 RF11 RF12 RF13 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF12 RF13 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF14 ws_order_number->[ws_order_number,ws_order_number];RF15 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF6 web_site_sk->[ws_web_site_sk];RF7 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_ship_date_sk];RF5 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ws_ship_addr_sk];RF3 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 RF4 RF5 RF6 RF7 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'NC')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '1999-04-02') and (date_dim.d_date >= '1999-02-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) 
-----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out deleted file mode 100644 index c50ffa373c8150..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out deleted file 
mode 100644 index 4ebfd5abc0eb1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query97.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and (date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1225) and 
(date_dim.d_month_seq >= 1214)) ---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out deleted file mode 100644 index d1a4251b785e74..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-06-19') and (date_dim.d_date >= '2002-05-20')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Music', 'Shoes', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out 
b/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out deleted file mode 100644 index e8094a7d066e20..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf100/shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[cs_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1235) and (date_dim.d_month_seq >= 1224)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out deleted file mode 100644 index 0db3330914c18a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IL', 'TN', 'TX'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('ID', 'OH', 'WY'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('IA', 'MS', 'SC'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ss_addr_sk->[ca_address_sk] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'ID', 'IL', 'MS', 'OH', 'SC', 'TN', 'TX', 'WY')) ---------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('D', 'W'),cd_education_status IN ('2 yr Degree', 'Primary'),OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'M', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] 
-------------------------PhysicalProject ---------------------------filter(hd_dep_count IN (1, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out deleted file mode 100644 index 64754afd654bc6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 c_current_addr_sk->[ca_address_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = 
store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 14)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out deleted file mode 100644 index 5c302c265fc9f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out deleted file mode 100644 index 72acf1dc6b71ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out deleted file mode 100644 index 397a41b34c4e60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Music') and (item.i_class = 'country')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out deleted file mode 100644 index d3ec5b9b1b897e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 
d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out deleted file mode 100644 index e768a09ec1494d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF10 c_current_addr_sk->[ca_address_sk] -------------------PhysicalProject ---------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[customer_address] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF9 ss_customer_sk->[c_customer_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject 
-----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] 
apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((item.i_category = 'Home')) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out deleted file mode 100644 index 2f4fbe401f1315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Fairview', 'Midway')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count = 4)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] 
---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', 
'43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) 
-------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', 
'46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out deleted file mode 100644 index 9d3c77acb23ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) 
-------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like 'Unknown%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out deleted file mode 100644 index 
21c6fa60d37b75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF5 wr_order_number->[ws_order_number] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) 
otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'VA')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2001-05-31') and (date_dim.d_date >= '2001-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out deleted file mode 100644 index 78fd7c847c29ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) 
otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Campbell County', 'Cleburne County', 'Escambia County', 'Fairfield County', 'Washtenaw County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out deleted file mode 100644 index e4a4bd5e427fa9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query1.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_1 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------PhysicalProject -------------------filter((date_dim.d_year = 2000)) ---------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ctr_customer_sk->[c_customer_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[customer] apply RFs: RF3 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_store_sk = ctr2.ctr_store_sk)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF2 ctr_store_sk->[ctr_store_sk,s_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store.s_store_sk = ctr1.ctr_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ctr_store_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((store.s_state = 'TN')) ---------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------hashAgg[LOCAL] -------------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out deleted file mode 100644 index 78fd7c847c29ed..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query10.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_10 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy <= 6) and (date_dim.d_moy >= 3) and (date_dim.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(ca_county IN ('Campbell County', 
'Cleburne County', 'Escambia County', 'Fairfield County', 'Washtenaw County')) ---------------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out deleted file mode 100644 index 35504b7f44d24e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query11.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_11 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1998, 1999)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject 
---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1998, 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000) > if((year_total > 0.00), (cast(year_total as DECIMALV3(38, 8)) / year_total), 0.000000))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 1999) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.dyear = 1999) and (t_s_secyear.sale_type = 's')) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.dyear = 1998) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1998) and (t_w_firstyear.sale_type = 'w') and 
(t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out deleted file mode 100644 index f46e97e8a5b3c6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query12.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_12 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2001-07-15') and (date_dim.d_date >= '2001-06-15')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Electronics', 'Men')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out 
b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out deleted file mode 100644 index 0db3330914c18a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query13.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_13 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('IL', 'TN', 'TX'),(store_sales.ss_net_profit >= 100.00),(store_sales.ss_net_profit <= 200.00)],AND[ca_state IN ('ID', 'OH', 'WY'),(store_sales.ss_net_profit >= 150.00)],AND[ca_state IN ('IA', 'MS', 'SC'),(store_sales.ss_net_profit <= 250.00)]]) build RFs:RF3 ss_addr_sk->[ca_address_sk] -----------------PhysicalProject -------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('IA', 'ID', 'IL', 'MS', 'OH', 'SC', 'TN', 'TX', 'WY')) ---------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=(OR[AND[(household_demographics.hd_dep_count = 1),cd_marital_status IN ('D', 'W'),cd_education_status IN ('2 yr Degree', 'Primary'),OR[AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 
'Primary'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price >= 150.00)]]],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00),(household_demographics.hd_dep_count = 3)]]) build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_net_profit <= 300.00) and (store_sales.ss_net_profit >= 50.00) and (store_sales.ss_sales_price <= 200.00) and (store_sales.ss_sales_price >= 50.00)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF4 -----------------------------PhysicalProject -------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'College')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Primary')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = '2 yr Degree')]] and cd_education_status IN ('2 yr Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'M', 'W')) ---------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------filter(hd_dep_count IN (1, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject 
---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out deleted file mode 100644 index 61f29b11211346..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query14.out +++ /dev/null @@ -1,152 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_14 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_brand_id = t.brand_id) and (item.i_category_id = t.category_id) and (item.i_class_id = t.class_id)) otherCondition=() build RFs:RF6 brand_id->[i_brand_id];RF7 class_id->[i_class_id];RF8 category_id->[i_category_id] ---------PhysicalProject -----------PhysicalOlapScan[item] apply RFs: RF6 RF7 RF8 ---------PhysicalIntersect -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = iss.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((d1.d_year <= 2001) and (d1.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject 
-------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = ics.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter((d2.d_year <= 2001) and (d2.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = iws.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 -------------------------PhysicalProject ---------------------------filter((d3.d_year <= 2001) and (d3.d_year >= 1999)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 
d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -----------------PhysicalProject -------------------PhysicalUnion ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF9 ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalProject -------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 -----------------PhysicalProject -------------------filter((date_dim.d_year <= 2001) and (date_dim.d_year >= 1999)) ---------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalRepeat -----------------------PhysicalUnion -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[ss_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF11 
ss_item_sk->[ss_item_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[ss_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF10 RF11 RF12 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF15 i_item_sk->[cs_item_sk,ss_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF14 ss_item_sk->[cs_item_sk] 
-------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF13 d_date_sk->[cs_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF13 RF14 RF15 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF15 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(sales as DOUBLE) > cast(average_sales as DOUBLE)) -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF18 i_item_sk->[ss_item_sk,ws_item_sk] -----------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = cross_items.ss_item_sk)) otherCondition=() build RFs:RF17 ss_item_sk->[ws_item_sk] 
-------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF16 d_date_sk->[ws_sold_date_sk] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF16 RF17 RF18 -----------------------------------------------PhysicalProject -------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2001)) ---------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF18 -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------PhysicalAssertNumRows ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out deleted file mode 100644 index fe0fd80c2f8b05..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query15.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_15 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),ca_state IN ('CA', 'GA', 'WA'),(catalog_sales.cs_sales_price > 500.00)]) build RFs:RF2 c_customer_sk->[cs_bill_customer_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 2001)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out deleted file mode 100644 index db5bf9b39a05ec..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query16.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_16 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((cs1.cs_order_number = cs2.cs_order_number)) otherCondition=(( not (cs_warehouse_sk = cs_warehouse_sk))) build RFs:RF4 cs_order_number->[cs_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((cs1.cs_order_number = cr1.cr_order_number)) otherCondition=() build RFs:RF3 cs_order_number->[cr_order_number] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF2 cc_call_center_sk->[cs_call_center_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs1.cs_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[cs_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'PA')) -------------------------------------PhysicalOlapScan[customer_address] 
-----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2002-05-31') and (date_dim.d_date >= '2002-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((call_center.cc_county = 'Williamson County')) -----------------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out deleted file mode 100644 index 12fa11701b619f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query17.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_17 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -------------------------PhysicalProject ---------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_quarter_name = '2001Q1')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 -------------------------------------PhysicalProject ---------------------------------------filter(d_quarter_name IN ('2001Q1', '2001Q2', '2001Q3')) 
-----------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out deleted file mode 100644 index ea401d9c36dc08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query18.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_18 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF5 i_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF2 cd_demo_sk->[cs_bill_cdemo_sk] -----------------------------------PhysicalProject 
-------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 RF4 RF5 -----------------------------------PhysicalProject -------------------------------------filter((cd1.cd_education_status = 'Primary') and (cd1.cd_gender = 'F')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF1 c_current_cdemo_sk->[cd_demo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter(c_birth_month IN (1, 10, 11, 3, 4, 7)) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(ca_state IN ('AL', 'CA', 'GA', 'IN', 'MO', 'MT', 'TN')) -------------------------------------------PhysicalOlapScan[customer_address] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out deleted file mode 100644 index 64754afd654bc6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query19.out +++ /dev/null @@ -1,35 
+0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_19 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (substring(ca_zip, 1, 5) = substring(s_zip, 1, 5)))) build RFs:RF4 c_current_addr_sk->[ca_address_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
-------------------------------------PhysicalProject ---------------------------------------filter((item.i_manager_id = 14)) -----------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out deleted file mode 100644 index 988b288ebb81d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query2.out +++ /dev/null @@ -1,39 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_2 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = wscs.sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = 
expr_(d_week_seq2 - 53))) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF2 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF2 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((date_dim.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -------------------PhysicalProject ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF1 -------------------PhysicalProject ---------------------filter((date_dim.d_year = 1999)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out deleted file mode 100644 index 8728415de8b335..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query20.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_20 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '2002-07-18') and (date_dim.d_date >= '2002-06-18')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Books', 'Music', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out deleted file mode 100644 index f68b978b0b2ba6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query21.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_21 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) <= 1.5) and (if((inv_before > 0), (cast(inv_after as DOUBLE) / cast(inv_before as DOUBLE)), NULL) >= cast((2.000000 / 3.0) as DOUBLE))) -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF2 w_warehouse_sk->[inv_warehouse_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = inventory.inv_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '1999-07-22') and (date_dim.d_date >= '1999-05-23')) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out deleted file mode 100644 index 09dedb98772f96..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query22.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_22 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[inv_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1211) and (date_dim.d_month_seq >= 1200)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out deleted file mode 100644 index 45d7a47eb7e46a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query23.out +++ /dev/null @@ -1,81 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_23 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((cnt > 4)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter(d_year IN (2000, 2001, 2002, 2003)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalCteAnchor ( cteId=CTEId#2 ) -----PhysicalCteProducer ( cteId=CTEId#2 ) -------PhysicalProject ---------NestedLoopJoin[INNER_JOIN](cast(ssales as DOUBLE) > cast((0.9500 * tpcds_cmax) as DOUBLE)) -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------filter(( not ss_customer_sk IS NULL)) -----------------------PhysicalOlapScan[store_sales] -----------PhysicalProject -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------filter(( not ss_customer_sk IS NULL)) -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------------------PhysicalProject ---------------------------------filter(d_year IN (2000, 2001, 2002, 2003)) -----------------------------------PhysicalOlapScan[date_dim] -----PhysicalResultSink -------PhysicalLimit[GLOBAL] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((catalog_sales.cs_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF5 cs_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 7) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) -------------------PhysicalProject ---------------------hashJoin[RIGHT_SEMI_JOIN 
shuffle] hashCondition=((web_sales.ws_item_sk = frequent_ss_items.item_sk)) otherCondition=() build RFs:RF8 ws_item_sk->[item_sk] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 -----------------------PhysicalProject -------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((web_sales.ws_bill_customer_sk = best_ss_customer.c_customer_sk)) otherCondition=() build RFs:RF7 c_customer_sk->[ws_bill_customer_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 7) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalCteConsumer ( cteId=CTEId#2 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out deleted file mode 100644 index d93b73ade16ecb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query24.out +++ /dev/null @@ -1,52 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_24 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF5 sr_ticket_number->[ss_ticket_number];RF6 sr_item_sk->[i_item_sk,ss_item_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_zip = customer_address.ca_zip) and (store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 ca_zip->[s_zip];RF3 c_customer_sk->[ss_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF3 RF4 RF5 RF6 -----------------------------PhysicalProject -------------------------------filter((store.s_market_id = 5)) ---------------------------------PhysicalOlapScan[store] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=(( not (c_birth_country = upper(ca_country)))) build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------PhysicalOlapScan[item] apply RFs: RF6 -----------------PhysicalProject -------------------PhysicalOlapScan[store_returns] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------NestedLoopJoin[INNER_JOIN](cast(paid as DOUBLE) > cast((0.05 * avg(cast(netpaid as DECIMALV3(38, 4)))) as DOUBLE)) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------filter((ssales.i_color = 'aquamarine')) -----------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out deleted file mode 100644 index 8ccafdc60f8ba4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query25.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_25 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF8 sr_customer_sk->[cs_bill_customer_sk];RF9 sr_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[cs_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------filter((d3.d_moy <= 10) and (d3.d_moy >= 4) and (d3.d_year = 1999)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 
sr_ticket_number->[ss_ticket_number] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 -----------------------------------PhysicalProject -------------------------------------filter((d2.d_moy <= 10) and (d2.d_moy >= 4) and (d2.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out deleted file mode 100644 index 383242890f9dd4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query26.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_26 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[cs_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'Unknown') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2002)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) 
---------------------------PhysicalOlapScan[promotion] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out deleted file mode 100644 index 0fa387fb0d6bb9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query27.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_27 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -----------------------------------PhysicalProject 
-------------------------------------filter((customer_demographics.cd_education_status = 'Secondary') and (customer_demographics.cd_gender = 'M') and (customer_demographics.cd_marital_status = 'W')) ---------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------PhysicalProject ---------------------------------filter((store.s_state = 'TN')) -----------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out deleted file mode 100644 index 36ec7305d96abb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query28.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_28 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------NestedLoopJoin[CROSS_JOIN] ---------PhysicalLimit[LOCAL] -----------NestedLoopJoin[CROSS_JOIN] -------------PhysicalLimit[LOCAL] ---------------NestedLoopJoin[CROSS_JOIN] -----------------PhysicalLimit[LOCAL] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------PhysicalLimit[LOCAL] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 5) and (store_sales.ss_quantity >= 0) and OR[AND[(store_sales.ss_list_price >= 107.00),(store_sales.ss_list_price <= 117.00)],AND[(store_sales.ss_coupon_amt >= 1319.00),(store_sales.ss_coupon_amt <= 2319.00)],AND[(store_sales.ss_wholesale_cost >= 60.00),(store_sales.ss_wholesale_cost <= 80.00)]]) -------------------------------------PhysicalOlapScan[store_sales] -------------------------PhysicalLimit[LOCAL] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 10) and (store_sales.ss_quantity >= 6) and OR[AND[(store_sales.ss_list_price >= 23.00),(store_sales.ss_list_price <= 33.00)],AND[(store_sales.ss_coupon_amt >= 825.00),(store_sales.ss_coupon_amt <= 1825.00)],AND[(store_sales.ss_wholesale_cost >= 43.00),(store_sales.ss_wholesale_cost <= 63.00)]]) -------------------------------------PhysicalOlapScan[store_sales] ---------------------PhysicalLimit[LOCAL] -----------------------hashAgg[GLOBAL] 
-------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 15) and (store_sales.ss_quantity >= 11) and OR[AND[(store_sales.ss_list_price >= 74.00),(store_sales.ss_list_price <= 84.00)],AND[(store_sales.ss_coupon_amt >= 4381.00),(store_sales.ss_coupon_amt <= 5381.00)],AND[(store_sales.ss_wholesale_cost >= 57.00),(store_sales.ss_wholesale_cost <= 77.00)]]) ---------------------------------PhysicalOlapScan[store_sales] -----------------PhysicalLimit[LOCAL] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 16) and OR[AND[(store_sales.ss_list_price >= 89.00),(store_sales.ss_list_price <= 99.00)],AND[(store_sales.ss_coupon_amt >= 3117.00),(store_sales.ss_coupon_amt <= 4117.00)],AND[(store_sales.ss_wholesale_cost >= 68.00),(store_sales.ss_wholesale_cost <= 88.00)]]) -----------------------------PhysicalOlapScan[store_sales] -------------PhysicalLimit[LOCAL] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 25) and (store_sales.ss_quantity >= 21) and OR[AND[(store_sales.ss_list_price >= 58.00),(store_sales.ss_list_price <= 68.00)],AND[(store_sales.ss_coupon_amt >= 9402.00),(store_sales.ss_coupon_amt <= 10402.00)],AND[(store_sales.ss_wholesale_cost >= 38.00),(store_sales.ss_wholesale_cost <= 58.00)]]) -------------------------PhysicalOlapScan[store_sales] ---------PhysicalLimit[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] 
-----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 30) and (store_sales.ss_quantity >= 26) and OR[AND[(store_sales.ss_list_price >= 64.00),(store_sales.ss_list_price <= 74.00)],AND[(store_sales.ss_coupon_amt >= 5792.00),(store_sales.ss_coupon_amt <= 6792.00)],AND[(store_sales.ss_wholesale_cost >= 73.00),(store_sales.ss_wholesale_cost <= 93.00)]]) ---------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out deleted file mode 100644 index 649a6f83d9759a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query29.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_29 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[cs_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_customer_sk = catalog_sales.cs_bill_customer_sk) and (store_returns.sr_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 sr_customer_sk->[cs_bill_customer_sk];RF8 sr_item_sk->[cs_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF7 RF8 RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[sr_item_sk,ss_item_sk] 
---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF2 sr_customer_sk->[ss_customer_sk];RF3 sr_item_sk->[ss_item_sk];RF4 sr_ticket_number->[ss_ticket_number] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d1.d_moy = 4) and (d1.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((d2.d_moy <= 7) and (d2.d_moy >= 4) and (d2.d_year = 1998)) -------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter(d_year IN (1998, 1999, 2000)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out deleted file mode 100644 index 4092c73d09fd5b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query3.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_3 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manufact_id = 816)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 11)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out deleted file mode 100644 index 
c894fcceff19a5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query30.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_30 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[wr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[wr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'AR')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out deleted file mode 100644 index f17ab6cf863612..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query31.out +++ /dev/null @@ -1,65 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_31 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------PhysicalProject ---------------------filter((ss.d_year = 1999) and d_qoy IN (1, 2, 3)) -----------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------PhysicalOlapScan[customer_address] ---PhysicalCteAnchor ( cteId=CTEId#1 ) -----PhysicalCteProducer ( cteId=CTEId#1 ) -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ws_bill_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 ---------------------PhysicalProject -----------------------filter((ws.d_year = 1999) and d_qoy IN (1, 2, 3)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] -----PhysicalResultSink 
-------PhysicalQuickSort[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalQuickSort[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ws1.ca_county = ws3.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF8 ca_county->[ca_county] -----------------PhysicalProject -------------------filter((ws3.d_qoy = 3) and (ws3.d_year = 1999)) ---------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF8 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((ss2.ca_county = ss3.ca_county)) otherCondition=() build RFs:RF7 ca_county->[ca_county] ---------------------PhysicalProject -----------------------filter((ss3.d_qoy = 3) and (ss3.d_year = 1999)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ws1.ca_county = ws2.ca_county)) otherCondition=((if((web_sales > 0.00), (cast(web_sales as DECIMALV3(38, 8)) / web_sales), NULL) > if((store_sales > 0.00), (cast(store_sales as DECIMALV3(38, 8)) / store_sales), NULL))) build RFs:RF6 ca_county->[ca_county,ca_county,ca_county] -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((ss1.ca_county = ws1.ca_county)) otherCondition=() build RFs:RF5 ca_county->[ca_county,ca_county] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((ss1.ca_county = ss2.ca_county)) otherCondition=() build RFs:RF4 ca_county->[ca_county] -----------------------------PhysicalProject -------------------------------filter((ss1.d_qoy = 1) and (ss1.d_year = 1999)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 RF6 
-----------------------------PhysicalProject -------------------------------filter((ss2.d_qoy = 2) and (ss2.d_year = 1999)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 ---------------------------PhysicalProject -----------------------------filter((ws1.d_qoy = 1) and (ws1.d_year = 1999)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF6 -------------------------PhysicalProject ---------------------------filter((ws2.d_qoy = 2) and (ws2.d_year = 1999)) -----------------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out deleted file mode 100644 index c413940debc6e7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query32.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_32 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------filter((cast(cs_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(cs_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) -----------------PhysicalWindow -------------------PhysicalQuickSort[LOCAL_SORT] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] 
-------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((item.i_manufact_id = 722)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-06-07') and (date_dim.d_date >= '2001-03-09')) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out deleted file mode 100644 index 721bd289919ecf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query33.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_33 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF3 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 
ca_address_sk->[ss_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF7 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF5 ca_address_sk->[cs_bill_addr_sk] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF7 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_manufact_id = item.i_manufact_id)) otherCondition=() build RFs:RF11 i_manufact_id->[i_manufact_id] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF9 ca_address_sk->[ws_bill_addr_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 RF10 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------------PhysicalOlapScan[customer_address] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF11 ---------------------PhysicalProject -----------------------filter((item.i_category = 'Books')) -------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out deleted file mode 100644 index c75d4fc3e18155..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query34.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_34 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dn.cnt <= 20) and (dn.cnt >= 15)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((store.s_county = 'Williamson County')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.2) and hd_buy_potential IN ('0-500', '1001-5000')) 
---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_dom <= 28) and (date_dim.d_dom >= 1) and OR[(date_dim.d_dom <= 3),(date_dim.d_dom >= 25)] and d_year IN (2000, 2001, 2002)) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out deleted file mode 100644 index 9012700621a358..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query35.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_35 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter(OR[ifnull($c$1, FALSE),ifnull($c$2, FALSE)]) ---------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF5 cd_demo_sk->[c_current_cdemo_sk] ---------------------------hashJoin[LEFT_SEMI_JOIN bucketShuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -----------------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[ss_customer_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -----------------------------------PhysicalProject -------------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 RF5 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) -------------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 
---------------------------PhysicalProject -----------------------------filter((date_dim.d_qoy < 4) and (date_dim.d_year = 1999)) -------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out deleted file mode 100644 index e31b175d47d2c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query36.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_36 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 
-----------------------------------------PhysicalProject -------------------------------------------filter((store.s_state = 'TN')) ---------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out deleted file mode 100644 index 149a61f7b37054..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query37.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_37 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and 
(inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 59.00) and (item.i_current_price >= 29.00) and i_manufact_id IN (705, 742, 777, 944)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '2002-05-28') and (date_dim.d_date >= '2002-03-29')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out deleted file mode 100644 index dc794c95f97b2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query38.out +++ /dev/null @@ -1,50 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_38 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------PhysicalIntersect -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ws_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 
-------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ss_customer_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1200) and (date_dim.d_month_seq >= 1189)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out deleted file mode 100644 index 7b00628d966265..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query39.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_39 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------filter((if((mean = 0.0), 0.0, (stdev / mean)) > 1.0)) ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[inv_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[inv_warehouse_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[inv_date_sk] ---------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2000) and d_moy IN (1, 2)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------hashJoin[INNER_JOIN shuffle] hashCondition=((inv1.i_item_sk = inv2.i_item_sk) and (inv1.w_warehouse_sk = inv2.w_warehouse_sk)) otherCondition=() build RFs:RF3 i_item_sk->[i_item_sk];RF4 w_warehouse_sk->[w_warehouse_sk] -------------filter((inv1.d_moy = 1)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------filter((inv2.d_moy = 2)) ---------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out deleted file mode 100644 index 709da33d851bff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query4.out +++ /dev/null @@ -1,75 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_4 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk,ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF1 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF8 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.dyear = 2000) and (t_w_secyear.sale_type = 'w')) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF7 customer_id->[customer_id] -------------------PhysicalProject ---------------------filter((t_w_firstyear.dyear = 1999) and (t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year_total > 0.000000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_c_secyear.customer_id)) otherCondition=((if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL) > if((year_total > 0.000000), (cast(year_total as DECIMALV3(38, 16)) / year_total), NULL))) build RFs:RF6 customer_id->[customer_id] 
-----------------------PhysicalProject -------------------------filter((t_c_secyear.dyear = 2000) and (t_c_secyear.sale_type = 'c')) ---------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF6 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_c_firstyear.customer_id)) otherCondition=() build RFs:RF5 customer_id->[customer_id,customer_id] ---------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id] -----------------------------PhysicalProject -------------------------------filter((t_s_secyear.dyear = 2000) and (t_s_secyear.sale_type = 's')) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 RF5 -----------------------------PhysicalProject -------------------------------filter((t_s_firstyear.dyear = 1999) and (t_s_firstyear.sale_type = 's') and (t_s_firstyear.year_total > 0.000000)) ---------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------------------PhysicalProject -----------------------------filter((t_c_firstyear.dyear = 1999) and (t_c_firstyear.sale_type = 'c') and (t_c_firstyear.year_total > 0.000000)) -------------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out deleted file mode 100644 index 041e5711184598..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query40.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_40 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF4 w_warehouse_sk->[cs_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF4 -------------------------------PhysicalProject ---------------------------------filter((item.i_current_price <= 1.49) and (item.i_current_price >= 0.99)) -----------------------------------PhysicalOlapScan[item] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_date <= '2001-06-01') and (date_dim.d_date >= '2001-04-02')) -------------------------------PhysicalOlapScan[date_dim] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out deleted file mode 100644 index 0bba60d4cdac39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query41.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_41 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_manufact = i1.i_manufact)) otherCondition=() build RFs:RF0 i_manufact->[i_manufact] -------------------PhysicalProject ---------------------filter((i1.i_manufact_id <= 744) and (i1.i_manufact_id >= 704)) -----------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((item_cnt > 0)) -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter(OR[AND[i_color IN ('forest', 'lime', 'maroon', 'navy', 'powder', 'sky', 'slate', 'smoke'),i_units IN ('Bunch', 'Case', 'Dozen', 'Gross', 'Lb', 'Ounce', 'Pallet', 'Pound'),OR[AND[(item.i_category = 'Women'),i_color IN ('forest', 'lime'),i_units IN ('Pallet', 'Pound'),i_size IN ('economy', 'small')],AND[(item.i_category = 'Women'),i_color IN ('navy', 'slate'),i_units IN ('Bunch', 'Gross'),i_size IN ('extra large', 'petite')],AND[(item.i_category = 'Men'),i_color IN ('powder', 'sky'),i_units IN ('Dozen', 'Lb'),i_size IN ('N/A', 'large')],AND[(item.i_category = 
'Men'),i_color IN ('maroon', 'smoke'),i_units IN ('Case', 'Ounce'),i_size IN ('economy', 'small')]]],AND[i_color IN ('aquamarine', 'dark', 'firebrick', 'frosted', 'papaya', 'peach', 'plum', 'sienna'),i_units IN ('Box', 'Bundle', 'Carton', 'Cup', 'Dram', 'Each', 'Tbl', 'Ton'),OR[AND[(item.i_category = 'Women'),i_color IN ('aquamarine', 'dark'),i_units IN ('Tbl', 'Ton'),i_size IN ('economy', 'small')],AND[(item.i_category = 'Women'),i_color IN ('frosted', 'plum'),i_units IN ('Box', 'Dram'),i_size IN ('extra large', 'petite')],AND[(item.i_category = 'Men'),i_color IN ('papaya', 'peach'),i_units IN ('Bundle', 'Carton'),i_size IN ('N/A', 'large')],AND[(item.i_category = 'Men'),i_color IN ('firebrick', 'sienna'),i_units IN ('Cup', 'Each'),i_size IN ('economy', 'small')]]]] and i_category IN ('Men', 'Women') and i_size IN ('N/A', 'economy', 'extra large', 'large', 'petite', 'small')) ---------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out deleted file mode 100644 index 0e8b731938b8c3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query42.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_42 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((item.i_manager_id = 1)) ---------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------filter((dt.d_moy = 11) and (dt.d_year = 1998)) -----------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out deleted file mode 100644 index 37ab89010ef0a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query43.out +++ /dev/null @@ -1,22 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_43 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -----------------------PhysicalProject -------------------------filter((date_dim.d_year = 2000)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -5.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out deleted file mode 100644 index 5c302c265fc9f3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query44.out +++ /dev/null @@ -1,69 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_44 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((asceding.rnk = descending.rnk)) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i1.i_item_sk = asceding.item_sk)) otherCondition=() build RFs:RF1 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((i2.i_item_sk = descending.item_sk)) otherCondition=() build RFs:RF0 item_sk->[i_item_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------PhysicalProject -------------------filter((rnk < 11)) ---------------------PhysicalWindow -----------------------PhysicalQuickSort[MERGE_SORT] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------PhysicalPartitionTopN -------------------------------PhysicalProject ---------------------------------NestedLoopJoin[INNER_JOIN](cast(rank_col as DOUBLE) > cast((0.9 * rank_col) as DOUBLE)) -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((ss1.ss_store_sk = 4)) -----------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------PhysicalProject -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] 
-----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((store_sales.ss_store_sk = 4) and ss_hdemo_sk IS NULL) -----------------------------------------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out deleted file mode 100644 index 72acf1dc6b71ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query45.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_45 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter(OR[substring(ca_zip, 1, 5) IN ('80348', '81792', '83405', '85392', '85460', '85669', '86197', '86475', '88274'),$c$1]) -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ws_item_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ws_bill_customer_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_qoy = 1) and (date_dim.d_year = 2000)) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter(i_item_sk IN (11, 13, 17, 19, 2, 23, 29, 3, 5, 7)) -----------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out deleted file mode 100644 index e7aa0e014dcdd9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query46.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_46 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 ca_address_sk->[c_current_addr_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 RF5 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF0 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter(s_city IN 
('Fairview', 'Midway')) -----------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------filter(d_dow IN (0, 6) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 8),(household_demographics.hd_vehicle_count = 0)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out deleted file mode 100644 index 0e9f713243773a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query47.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_47 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2000),AND[(date_dim.d_year = 1999),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2001),(date_dim.d_moy = 1)]] and d_year IN (1999, 2000, 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] 
---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1)) and (v1.s_company_name = v1_lead.s_company_name) and (v1.s_store_name = v1_lead.s_store_name)) otherCondition=() build RFs:RF8 i_category->[i_category,i_category];RF9 i_brand->[i_brand,i_brand];RF10 s_store_name->[s_store_name,s_store_name];RF11 s_company_name->[s_company_name,s_company_name];RF12 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1)) and (v1.s_company_name = v1_lag.s_company_name) and (v1.s_store_name = v1_lag.s_store_name)) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 s_store_name->[s_store_name];RF6 s_company_name->[s_company_name];RF7 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 RF11 RF12 ---------------------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2000)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF8 RF9 RF10 RF11 RF12 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out deleted file mode 100644 index d11dadeae0b923..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query48.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is 
automatically generated. You should know what you did if you want to edit this --- !ds_shape_48 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=(OR[AND[ca_state IN ('ND', 'NY', 'SD'),(store_sales.ss_net_profit <= 2000.00)],AND[ca_state IN ('GA', 'KS', 'MD'),(store_sales.ss_net_profit >= 150.00),(store_sales.ss_net_profit <= 3000.00)],AND[ca_state IN ('CO', 'MN', 'NC'),(store_sales.ss_net_profit >= 50.00)]]) build RFs:RF1 ca_address_sk->[ss_addr_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = store_sales.ss_cdemo_sk)) otherCondition=(OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'Secondary'),(store_sales.ss_sales_price >= 100.00),(store_sales.ss_sales_price <= 150.00)],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '2 yr Degree'),(store_sales.ss_sales_price <= 100.00)],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Advanced Degree'),(store_sales.ss_sales_price >= 150.00)]]) build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_net_profit <= 25000.00) and (store_sales.ss_net_profit >= 0.00) and (store_sales.ss_sales_price <= 
200.00) and (store_sales.ss_sales_price >= 50.00)) -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------PhysicalProject ---------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'S'),(customer_demographics.cd_education_status = 'Secondary')],AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = '2 yr Degree')],AND[(customer_demographics.cd_marital_status = 'D'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('2 yr Degree', 'Advanced Degree', 'Secondary') and cd_marital_status IN ('D', 'M', 'S')) -----------------------------PhysicalOlapScan[customer_demographics] ---------------------PhysicalProject -----------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('CO', 'GA', 'KS', 'MD', 'MN', 'NC', 'ND', 'NY', 'SD')) -------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------filter((date_dim.d_year = 2001)) ---------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out deleted file mode 100644 index 8b807baf5409e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query49.out +++ /dev/null @@ -1,107 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_49 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((ws.ws_item_sk = wr.wr_item_sk) and (ws.ws_order_number = wr.wr_order_number)) otherCondition=() build RFs:RF1 ws_order_number->[wr_order_number];RF2 ws_item_sk->[wr_item_sk] -----------------------------------------------------------PhysicalProject 
-------------------------------------------------------------filter((wr.wr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((ws.ws_net_paid > 0.00) and (ws.ws_net_profit > 1.00) and (ws.ws_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] 
---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((cs.cs_item_sk = cr.cr_item_sk) and (cs.cs_order_number = cr.cr_order_number)) otherCondition=() build RFs:RF4 cs_order_number->[cr_order_number];RF5 cs_item_sk->[cr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((cr.cr_return_amount > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((cs.cs_net_paid > 0.00) and (cs.cs_net_profit > 1.00) and (cs.cs_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalDistribute[DistributionSpecExecutionAny] 
-------------------PhysicalTopN[MERGE_SORT] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------PhysicalTopN[LOCAL_SORT] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter(OR[(return_rank <= 10),(currency_rank <= 10)]) -----------------------------------PhysicalWindow -------------------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------------------PhysicalWindow -----------------------------------------PhysicalQuickSort[MERGE_SORT] -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalQuickSort[LOCAL_SORT] -----------------------------------------------PhysicalProject -------------------------------------------------hashAgg[GLOBAL] ---------------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------------hashAgg[LOCAL] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((sts.ss_item_sk = sr.sr_item_sk) and (sts.ss_ticket_number = sr.sr_ticket_number)) otherCondition=() build RFs:RF7 ss_ticket_number->[sr_ticket_number];RF8 ss_item_sk->[sr_item_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter((sr.sr_return_amt > 10000.00)) ---------------------------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF7 RF8 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sts.ss_sold_date_sk = 
date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((sts.ss_net_paid > 0.00) and (sts.ss_net_profit > 1.00) and (sts.ss_quantity > 0)) -------------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 ---------------------------------------------------------------PhysicalProject -----------------------------------------------------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 1998)) -------------------------------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out deleted file mode 100644 index 917f29d09727cf..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query5.out +++ /dev/null @@ -1,77 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_5 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk,ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk,ss_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject 
-----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF3 cp_catalog_page_sk->[cr_catalog_page_sk,cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cr_returned_date_sk,cs_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.wsr_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF7 web_site_sk->[ws_web_site_sk,ws_web_site_sk] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((salesreturns.date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk,ws_sold_date_sk] -------------------------------------PhysicalUnion ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF6 RF7 ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_returns.wr_item_sk = web_sales.ws_item_sk) and (web_returns.wr_order_number = web_sales.ws_order_number)) otherCondition=() build RFs:RF4 wr_item_sk->[ws_item_sk];RF5 wr_order_number->[ws_order_number] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 RF7 ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-02') and (date_dim.d_date >= '2000-08-19')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out deleted file mode 100644 index f5c3f38463d42c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query50.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_50 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ss_sold_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_customer_sk = store_returns.sr_customer_sk) and (store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF1 sr_ticket_number->[ss_ticket_number];RF2 sr_item_sk->[ss_item_sk];RF3 sr_customer_sk->[ss_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((d2.d_moy = 8) and (d2.d_year = 2001)) -----------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - 
diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out deleted file mode 100644 index 6c22d2df3086e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query51.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_51 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((web_cumulative > store_cumulative)) -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((web.d_date = store.d_date) and (web.item_sk = store.item_sk)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1223) and (date_dim.d_month_seq >= 1212)) ---------------------------------------------PhysicalOlapScan[date_dim] 
---------------------PhysicalProject -----------------------PhysicalWindow -------------------------PhysicalQuickSort[LOCAL_SORT] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------PhysicalProject -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1223) and (date_dim.d_month_seq >= 1212)) ---------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out deleted file mode 100644 index 45fecf5a37245e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query52.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_52 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((dt.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 1)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((dt.d_moy = 12) and (dt.d_year = 2000)) -------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out deleted file mode 100644 index d2467a65e93e09..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query53.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_53 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_quarterly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_quarterly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_quarterly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN ('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 
'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out deleted file mode 100644 index 397a41b34c4e60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query54.out +++ /dev/null @@ -1,76 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_54 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) <= (d_month_seq + 3)) -----------------------------PhysicalProject -------------------------------NestedLoopJoin[INNER_JOIN](cast(d_month_seq as BIGINT) >= (d_month_seq + 1)) ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((my_customers.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_county = store.s_county) and (customer_address.ca_state = store.s_state)) otherCondition=() build RFs:RF4 s_county->[ca_county];RF5 s_state->[ca_state] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((my_customers.c_current_addr_sk = 
customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 c_current_addr_sk->[ca_address_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 RF4 RF5 -------------------------------------------------PhysicalProject ---------------------------------------------------hashAgg[GLOBAL] -----------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------hashAgg[LOCAL] ---------------------------------------------------------PhysicalProject -----------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_customer_sk = cs_or_ws_sales.customer_sk)) otherCondition=() build RFs:RF2 customer_sk->[c_customer_sk] -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------------------------------------------------PhysicalProject ---------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ws_sold_date_sk] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cs_or_ws_sales.item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ws_item_sk] ---------------------------------------------------------------------PhysicalUnion -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject 
---------------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 -----------------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------------------------------------------PhysicalProject ---------------------------------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------filter((item.i_category = 'Music') and (item.i_class = 'country')) -------------------------------------------------------------------------PhysicalOlapScan[item] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) ---------------------------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[store] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalAssertNumRows -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -----------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalAssertNumRows 
-------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 1) and (date_dim.d_year = 1999)) -------------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out deleted file mode 100644 index 652a5dab8d16b2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query55.out +++ /dev/null @@ -1,23 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_55 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------PhysicalProject ---------------------------filter((item.i_manager_id = 52)) -----------------------------PhysicalOlapScan[item] ---------------------PhysicalProject -----------------------filter((date_dim.d_moy = 11) and (date_dim.d_year = 2000)) 
-------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out deleted file mode 100644 index d3ec5b9b1b897e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query56.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_56 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ca_address_sk->[ss_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] 
---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF7 ca_address_sk->[cs_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN 
broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF4 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ws_bill_addr_sk->[ca_address_sk] -----------------------------PhysicalProject -------------------------------filter((customer_address.ca_gmt_offset = -6.00)) ---------------------------------PhysicalOlapScan[customer_address] apply RFs: RF11 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 
2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter(i_color IN ('orchid', 'pink', 'powder')) ---------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out deleted file mode 100644 index 2e8174812f69ea..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query57.out +++ /dev/null @@ -1,45 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_57 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------PhysicalWindow ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((call_center.cc_call_center_sk = catalog_sales.cs_call_center_sk)) otherCondition=() build RFs:RF1 cc_call_center_sk->[cs_call_center_sk] ---------------------------------PhysicalProject 
-----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter(OR[(date_dim.d_year = 2001),AND[(date_dim.d_year = 2000),(date_dim.d_moy = 12)],AND[(date_dim.d_year = 2002),(date_dim.d_moy = 1)]] and d_year IN (2000, 2001, 2002)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[call_center] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] ---PhysicalResultSink -----PhysicalProject -------PhysicalTopN[MERGE_SORT] ---------PhysicalDistribute[DistributionSpecGather] -----------PhysicalTopN[LOCAL_SORT] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((v1.cc_name = v1_lead.cc_name) and (v1.i_brand = v1_lead.i_brand) and (v1.i_category = v1_lead.i_category) and (v1.rn = expr_(rn - 1))) otherCondition=() build RFs:RF7 i_category->[i_category,i_category];RF8 i_brand->[i_brand,i_brand];RF9 cc_name->[cc_name,cc_name];RF10 expr_(rn - 1)->[(rn + 1),rn] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((v1.cc_name = v1_lag.cc_name) and (v1.i_brand = v1_lag.i_brand) and (v1.i_category = v1_lag.i_category) and (v1.rn = expr_(rn + 1))) otherCondition=() build RFs:RF3 i_category->[i_category];RF4 i_brand->[i_brand];RF5 cc_name->[cc_name];RF6 rn->[(rn + 1)] ---------------------PhysicalProject -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 RF5 RF6 RF7 RF8 RF9 RF10 ---------------------filter((if((avg_monthly_sales > 
0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000) and (v2.avg_monthly_sales > 0.0000) and (v2.d_year = 2001)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF7 RF8 RF9 RF10 -----------------PhysicalProject -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out deleted file mode 100644 index 97b3a3af96ad02..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query58.out +++ /dev/null @@ -1,86 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_58 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = ws_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * ws_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * ws_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as DOUBLE)) and (cast(ws_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE))) build RFs:RF13 item_id->[i_item_id] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) 
otherCondition=() build RFs:RF12 i_item_sk->[ws_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF11 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF11 RF12 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF10 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF10 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF9 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalAssertNumRows ---------------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date = '2001-06-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((ss_items.item_id = cs_items.item_id)) otherCondition=((cast(cs_item_rev as DOUBLE) <= cast((1.1 * ss_item_rev) as DOUBLE)) and (cast(cs_item_rev as DOUBLE) >= cast((0.9 * ss_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) <= cast((1.1 * cs_item_rev) as DOUBLE)) and (cast(ss_item_rev as DOUBLE) >= cast((0.9 * cs_item_rev) as 
DOUBLE))) build RFs:RF8 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF4 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-06-16')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -----------------------------------------PhysicalAssertNumRows -------------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_date = '2001-06-16')) -------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out deleted file mode 100644 index 4492908c371532..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query59.out +++ /dev/null @@ -1,42 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_59 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------PhysicalProject -----------------PhysicalOlapScan[store_sales] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((expr_cast(d_week_seq1 as BIGINT) = expr_(d_week_seq2 - 52)) and (y.s_store_id1 = x.s_store_id2)) otherCondition=() build RFs:RF5 s_store_id2->[s_store_id] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq1)) otherCondition=() build RFs:RF3 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1206) and 
(d.d_month_seq >= 1195)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((wss.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((d.d_week_seq = d_week_seq2)) otherCondition=() build RFs:RF1 d_week_seq->[d_week_seq] -----------------------PhysicalProject -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF1 RF2 -----------------------PhysicalProject -------------------------filter((d.d_month_seq <= 1218) and (d.d_month_seq >= 1207)) ---------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out deleted file mode 100644 index 43c8732d7f8553..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query6.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_6 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((cnt >= 10)) -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((a.ca_address_sk = c.c_current_addr_sk)) otherCondition=() build RFs:RF5 c_current_addr_sk->[ca_address_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer_address] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_customer_sk = s.ss_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_item_sk = i.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((s.ss_sold_date_sk = d.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d.d_month_seq = date_dim.d_month_seq)) otherCondition=() build RFs:RF1 d_month_seq->[d_month_seq] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 
---------------------------------------PhysicalAssertNumRows -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[GLOBAL] ---------------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------------hashAgg[LOCAL] -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_moy = 3) and (date_dim.d_year = 2002)) -----------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((j.i_category = i.i_category)) otherCondition=((cast(i_current_price as DECIMALV3(38, 5)) > (1.2 * avg(cast(i_current_price as DECIMALV3(9, 4)))))) build RFs:RF0 i_category->[i_category] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF0 -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out deleted file mode 100644 index e56a60c6eebef7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query60.out +++ /dev/null @@ -1,83 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_60 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalUnion -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[ss_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF0 i_item_id->[i_item_id] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[item] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Jewelry')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF6 ca_address_sk->[cs_bill_addr_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF7 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -5.00)) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF4 i_item_id->[i_item_id] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[item] apply RFs: RF4 
-------------------------------PhysicalProject ---------------------------------filter((item.i_category = 'Jewelry')) -----------------------------------PhysicalOlapScan[item] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_sales.ws_bill_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF11 ca_address_sk->[ws_bill_addr_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF10 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF9 RF10 RF11 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_moy = 10) and (date_dim.d_year = 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((item.i_item_id = item.i_item_id)) otherCondition=() build RFs:RF8 i_item_id->[i_item_id] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[item] apply RFs: RF8 -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Jewelry')) ---------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject 
-------------------------------filter((customer_address.ca_gmt_offset = -5.00)) ---------------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out deleted file mode 100644 index e768a09ec1494d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query61.out +++ /dev/null @@ -1,70 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_61 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF10 c_current_addr_sk->[ca_address_sk] -------------------PhysicalProject ---------------------filter((customer_address.ca_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[customer_address] apply RFs: RF10 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF9 ss_customer_sk->[c_customer_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF9 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[ss_promo_sk] 
-------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF6 i_item_sk->[ss_item_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 RF7 RF8 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((item.i_category = 'Home')) ---------------------------------------PhysicalOlapScan[item] -------------------------------PhysicalProject ---------------------------------filter(OR[(promotion.p_channel_dmail = 'Y'),(promotion.p_channel_email = 'Y'),(promotion.p_channel_tv = 'Y')]) -----------------------------------PhysicalOlapScan[promotion] ---------------------------PhysicalProject -----------------------------filter((store.s_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] 
-----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter((item.i_category = 'Home')) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------PhysicalOlapScan[customer_address] -------------------PhysicalProject ---------------------filter((store.s_gmt_offset = -7.00)) -----------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out deleted file mode 100644 index 00e7e385d016fe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query62.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_62 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[ws_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[ws_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1234) and (date_dim.d_month_seq >= 1223)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] -----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out deleted file mode 
100644 index bbbb80bc4b68e0..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query63.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_63 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((if((avg_monthly_sales > 0.0000), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------------PhysicalProject -----------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('personal', 'portable', 'reference', 'self-help'),i_brand IN 
('exportiunivamalg #9', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9')],AND[i_category IN ('Men', 'Music', 'Women'),i_class IN ('accessories', 'classical', 'fragrances', 'pants'),i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'importoamalg #1')]] and i_brand IN ('amalgimporto #1', 'edu packscholar #1', 'exportiimporto #1', 'exportiunivamalg #9', 'importoamalg #1', 'scholaramalgamalg #14', 'scholaramalgamalg #7', 'scholaramalgamalg #9') and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Music', 'Women') and i_class IN ('accessories', 'classical', 'fragrances', 'pants', 'personal', 'portable', 'reference', 'self-help')) -------------------------------------------PhysicalOlapScan[item] -----------------------------------PhysicalProject -------------------------------------filter(d_month_seq IN (1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233)) ---------------------------------------PhysicalOlapScan[date_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out deleted file mode 100644 index 8155e898243e42..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query64.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_64 -- -PhysicalCteAnchor ( cteId=CTEId#1 ) ---PhysicalCteProducer ( cteId=CTEId#1 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_shipto_date_sk = d3.d_date_sk)) otherCondition=() build RFs:RF19 d_date_sk->[c_first_shipto_date_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=(( not (cd_marital_status = cd_marital_status))) build RFs:RF18 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((store_sales.ss_addr_sk = ad1.ca_address_sk)) otherCondition=() build RFs:RF17 ca_address_sk->[ss_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF16 p_promo_sk->[ss_promo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF14 ss_item_sk->[sr_item_sk];RF15 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF14 RF15 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = cd1.cd_demo_sk)) otherCondition=() build RFs:RF13 cd_demo_sk->[ss_cdemo_sk] -------------------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF12 i_item_sk->[cr_item_sk,cs_item_sk,ss_item_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd1.hd_income_band_sk = ib1.ib_income_band_sk)) otherCondition=() build RFs:RF10 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = hd1.hd_demo_sk)) otherCondition=() build RFs:RF9 hd_demo_sk->[ss_hdemo_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = cs_ui.cs_item_sk)) otherCondition=() build RFs:RF8 cs_item_sk->[ss_item_sk] -------------------------------------------------------PhysicalProject ---------------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF7 d_date_sk->[ss_sold_date_sk] -----------------------------------------------------------PhysicalProject -------------------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF7 RF8 RF9 RF11 RF12 RF13 RF16 RF17 RF18 -----------------------------------------------------------PhysicalProject -------------------------------------------------------------filter(d_year IN (1999, 2000)) ---------------------------------------------------------------PhysicalOlapScan[date_dim] 
-------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((sale > (2 * refund))) -----------------------------------------------------------hashAgg[GLOBAL] -------------------------------------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------------------------------------hashAgg[LOCAL] -----------------------------------------------------------------PhysicalProject -------------------------------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF5 cr_item_sk->[cs_item_sk];RF6 cr_order_number->[cs_order_number] ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 RF6 RF12 ---------------------------------------------------------------------PhysicalProject -----------------------------------------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF12 ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF10 -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[income_band] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[store] ---------------------------------------PhysicalProject -----------------------------------------filter((item.i_current_price <= 58.00) and (item.i_current_price >= 49.00) and i_color IN ('blush', 'lace', 'lawn', 'misty', 'orange', 'pink')) 
-------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[promotion] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_current_addr_sk = ad2.ca_address_sk)) otherCondition=() build RFs:RF4 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_cdemo_sk = cd2.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[c_current_cdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_first_sales_date_sk = d2.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[c_first_sales_date_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_hdemo_sk = hd2.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 RF3 RF4 RF19 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((hd2.hd_income_band_sk = ib2.ib_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -----------------------------------------PhysicalProject 
-------------------------------------------PhysicalOlapScan[income_band] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[customer_demographics] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] -----------------PhysicalProject -------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalQuickSort[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalQuickSort[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((cs1.item_sk = cs2.item_sk) and (cs1.store_name = cs2.store_name) and (cs1.store_zip = cs2.store_zip)) otherCondition=((cs2.cnt <= cs1.cnt)) build RFs:RF20 item_sk->[item_sk];RF21 store_name->[store_name];RF22 store_zip->[store_zip] ---------------PhysicalProject -----------------filter((cs1.syear = 1999)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) apply RFs: RF20 RF21 RF22 ---------------PhysicalProject -----------------filter((cs2.syear = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#1 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out deleted file mode 100644 index 67cd990e11a2e8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query65.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_65 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = sc.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = sc.ss_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk,ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((sb.ss_store_sk = sc.ss_store_sk)) otherCondition=((cast(revenue as DOUBLE) <= cast((0.1 * ave) as DOUBLE))) build RFs:RF2 ss_store_sk->[ss_store_sk] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 RF3 RF4 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1187) and (date_dim.d_month_seq >= 1176)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject 
-------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_month_seq <= 1187) and (date_dim.d_month_seq >= 1176)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out deleted file mode 100644 index 924459179feee9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query66.out +++ /dev/null @@ -1,62 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_66 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------PhysicalUnion -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF3 w_warehouse_sk->[ws_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[ws_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF0 sm_ship_mode_sk->[ws_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('BOXBUNDLES', 'ORIENTAL')) -----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject 
-----------------------------------------filter((date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 71770) and (time_dim.t_time >= 42970)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF7 w_warehouse_sk->[cs_warehouse_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[cs_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF4 sm_ship_mode_sk->[cs_ship_mode_sk] -------------------------------------------PhysicalProject ---------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF4 RF5 RF6 RF7 -------------------------------------------PhysicalProject ---------------------------------------------filter(sm_carrier IN ('BOXBUNDLES', 'ORIENTAL')) 
-----------------------------------------------PhysicalOlapScan[ship_mode] ---------------------------------------PhysicalProject -----------------------------------------filter((date_dim.d_year = 2001)) -------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------PhysicalProject -------------------------------------filter((cast(t_time as BIGINT) <= 71770) and (time_dim.t_time >= 42970)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out deleted file mode 100644 index 11f2d128129a53..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query67.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_67 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------filter((rk <= 100)) -----------PhysicalWindow -------------PhysicalPartitionTopN ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalPartitionTopN -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 
s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_month_seq <= 1228) and (date_dim.d_month_seq >= 1217)) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out deleted file mode 100644 index 2f4fbe401f1315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query68.out +++ /dev/null @@ -1,38 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_68 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_current_addr_sk = current_addr.ca_address_sk)) otherCondition=(( not (ca_city = bought_city))) build RFs:RF5 c_current_addr_sk->[ca_address_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer_address] apply RFs: RF5 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((dn.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF4 ss_customer_sk->[c_customer_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------PhysicalProject -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((store_sales.ss_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF3 ss_addr_sk->[ca_address_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer_address] apply RFs: RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] 
-------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (1998, 1999, 2000)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------filter(s_city IN ('Fairview', 'Midway')) -------------------------------------PhysicalOlapScan[store] -----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 3),(household_demographics.hd_vehicle_count = 4)]) ---------------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out deleted file mode 100644 index 31101f12eab21a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query69.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_69 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((c.c_customer_sk = store_sales.ss_customer_sk)) otherCondition=() build RFs:RF6 c_customer_sk->[ss_customer_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = catalog_sales.cs_ship_customer_sk)) otherCondition=() build RFs:RF4 c_customer_sk->[cs_ship_customer_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF3 RF4 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer_demographics.cd_demo_sk = c.c_current_cdemo_sk)) otherCondition=() build RFs:RF2 c_current_cdemo_sk->[cd_demo_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF2 ---------------------------hashJoin[LEFT_ANTI_JOIN shuffle] hashCondition=((c.c_customer_sk = web_sales.ws_bill_customer_sk)) otherCondition=() -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((c.c_current_addr_sk = ca.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------filter(ca_state IN ('IL', 'ME', 'TX')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_moy <= 3) and (date_dim.d_moy >= 1) and (date_dim.d_year = 2002)) -------------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out deleted file mode 100644 index 2d63af9e61b19e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query7.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_7 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF2 p_promo_sk->[ss_promo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[ss_cdemo_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((customer_demographics.cd_education_status = 'College') and (customer_demographics.cd_gender = 'F') and (customer_demographics.cd_marital_status = 'W')) -----------------------------------PhysicalOlapScan[customer_demographics] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 2001)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------filter(OR[(promotion.p_channel_email = 'N'),(promotion.p_channel_event = 'N')]) ---------------------------PhysicalOlapScan[promotion] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out deleted file mode 100644 index ec1bdd0e99afb6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query70.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_70 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF4 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1231) and (d1.d_month_seq >= 1220)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((store.s_state = tmp1.s_state)) otherCondition=() 
build RFs:RF2 s_state->[s_state] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------hashAgg[GLOBAL] ---------------------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------------------hashAgg[LOCAL] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store.s_store_sk = store_sales.ss_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------------------------------PhysicalProject -----------------------------------------------------filter((date_dim.d_month_seq <= 1231) and (date_dim.d_month_seq >= 1220)) -------------------------------------------------------PhysicalOlapScan[date_dim] -----------------------------------------------PhysicalProject -------------------------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out deleted file mode 100644 index 0d26e1f81ccb94..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query71.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_71 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF2 t_time_sk->[cs_sold_time_sk,ss_sold_time_sk,ws_sold_time_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((tmp.sold_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk,ss_item_sk,ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalUnion ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------PhysicalProject -------------------------------filter((item.i_manager_id = 1)) ---------------------------------PhysicalOlapScan[item] 
-------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2002)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------filter(t_meal_time IN ('breakfast', 'dinner')) -------------------------PhysicalOlapScan[time_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out deleted file mode 100644 index 06bd3cd70cf867..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query72.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_72 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((warehouse.w_warehouse_sk = inventory.inv_warehouse_sk)) otherCondition=() build RFs:RF10 w_warehouse_sk->[inv_warehouse_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((catalog_sales.cs_item_sk = inventory.inv_item_sk) and (inventory.inv_date_sk = d2.d_date_sk)) otherCondition=((inventory.inv_quantity_on_hand < catalog_sales.cs_quantity)) build RFs:RF8 inv_date_sk->[d_date_sk];RF9 inv_item_sk->[cs_item_sk,i_item_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cs_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_week_seq = d2.d_week_seq)) otherCondition=() build RFs:RF6 
d_week_seq->[d_week_seq] -------------------------------PhysicalProject ---------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_returns.cr_item_sk = catalog_sales.cs_item_sk) and (catalog_returns.cr_order_number = catalog_sales.cs_order_number)) otherCondition=() build RFs:RF4 cs_item_sk->[cr_item_sk];RF5 cs_order_number->[cr_order_number] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 RF5 -----------------------------------PhysicalProject -------------------------------------hashJoin[LEFT_OUTER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() ---------------------------------------PhysicalProject -----------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF3 cd_demo_sk->[cs_bill_cdemo_sk] -------------------------------------------PhysicalProject ---------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[cs_bill_hdemo_sk] -----------------------------------------------PhysicalProject -------------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = d3.d_date_sk) and (catalog_sales.cs_sold_date_sk = d1.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk];RF1 d_date_sk->[cs_sold_date_sk] ---------------------------------------------------PhysicalProject -----------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 RF7 RF9 ---------------------------------------------------PhysicalProject -----------------------------------------------------NestedLoopJoin[INNER_JOIN](d3.d_date > 
days_add(d_date, INTERVAL 5 DAY)) -------------------------------------------------------PhysicalProject ---------------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------------------------PhysicalProject ---------------------------------------------------------filter((d1.d_year = 1998)) -----------------------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF6 -----------------------------------------------PhysicalProject -------------------------------------------------filter((household_demographics.hd_buy_potential = '1001-5000')) ---------------------------------------------------PhysicalOlapScan[household_demographics] -------------------------------------------PhysicalProject ---------------------------------------------filter((customer_demographics.cd_marital_status = 'S')) -----------------------------------------------PhysicalOlapScan[customer_demographics] ---------------------------------------PhysicalProject -----------------------------------------PhysicalOlapScan[promotion] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[item] apply RFs: RF9 -----------------------PhysicalOlapScan[inventory] apply RFs: RF10 -------------------PhysicalProject ---------------------PhysicalOlapScan[warehouse] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out deleted file mode 100644 index 52c88ab966b1c9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query73.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_73 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((dj.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 ss_customer_sk->[c_customer_sk] -------------PhysicalProject ---------------PhysicalOlapScan[customer] apply RFs: RF3 -------------filter((dj.cnt <= 5) and (dj.cnt >= 1)) ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[ss_store_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dom <= 2) and (date_dim.d_dom >= 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------filter((store.s_county = 'Williamson County')) ---------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject 
---------------------------filter((household_demographics.hd_vehicle_count > 0) and (if((hd_vehicle_count > 0), (cast(hd_dep_count as DOUBLE) / cast(hd_vehicle_count as DOUBLE)), NULL) > 1.0) and hd_buy_potential IN ('1001-5000', '5001-10000')) -----------------------------PhysicalOlapScan[household_demographics] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out deleted file mode 100644 index 30e95b3fd06a84..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query74.out +++ /dev/null @@ -1,54 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_74 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF2 c_customer_sk->[ss_customer_sk,ws_bill_customer_sk] ---------PhysicalProject -----------PhysicalUnion -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk 
= date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 -------------------------PhysicalProject ---------------------------filter(d_year IN (1999, 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------PhysicalProject -----------PhysicalOlapScan[customer] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((t_s_firstyear.customer_id = t_w_secyear.customer_id)) otherCondition=((if((year_total > 0.00), (cast(year_total as DECIMALV3(13, 8)) / year_total), NULL) > if((year_total > 0.00), (cast(year_total as DECIMALV3(13, 8)) / year_total), NULL))) build RFs:RF5 customer_id->[customer_id] ---------------PhysicalProject -----------------filter((t_w_secyear.sale_type = 'w') and (t_w_secyear.year = 2000)) -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t_s_firstyear.customer_id = t_w_firstyear.customer_id)) otherCondition=() build RFs:RF4 customer_id->[customer_id,customer_id] -------------------hashJoin[INNER_JOIN shuffle] hashCondition=((t_s_secyear.customer_id = t_s_firstyear.customer_id)) otherCondition=() build RFs:RF3 customer_id->[customer_id] ---------------------PhysicalProject -----------------------filter((t_s_secyear.sale_type = 's') and (t_s_secyear.year = 2000)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 ---------------------PhysicalProject -----------------------filter((t_s_firstyear.sale_type = 's') and (t_s_firstyear.year = 1999) and (t_s_firstyear.year_total > 0.00)) -------------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF4 
-------------------PhysicalProject ---------------------filter((t_w_firstyear.sale_type = 'w') and (t_w_firstyear.year = 1999) and (t_w_firstyear.year_total > 0.00)) -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out deleted file mode 100644 index c26b81b87791ba..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query75.out +++ /dev/null @@ -1,73 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_75 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecHash] ---------hashAgg[LOCAL] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalUnion -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF2 cs_order_number->[cr_order_number];RF3 cs_item_sk->[cr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF2 RF3 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = catalog_sales.cs_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = catalog_sales.cs_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[cs_item_sk] ---------------------------------PhysicalProject 
-----------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject -----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF6 ss_ticket_number->[sr_ticket_number];RF7 ss_item_sk->[sr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_returns] apply RFs: RF6 RF7 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = store_sales.ss_sold_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = store_sales.ss_item_sk)) otherCondition=() build RFs:RF4 i_item_sk->[ss_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalDistribute[DistributionSpecExecutionAny] ---------------------PhysicalProject 
-----------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF10 ws_order_number->[wr_order_number];RF11 ws_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[web_returns] apply RFs: RF10 RF11 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF9 d_date_sk->[ws_sold_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 ---------------------------------PhysicalProject -----------------------------------filter((item.i_category = 'Sports')) -------------------------------------PhysicalOlapScan[item] -----------------------------PhysicalProject -------------------------------filter(d_year IN (2001, 2002)) ---------------------------------PhysicalOlapScan[date_dim] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffle] hashCondition=((curr_yr.i_brand_id = prev_yr.i_brand_id) and (curr_yr.i_category_id = prev_yr.i_category_id) and (curr_yr.i_class_id = prev_yr.i_class_id) and (curr_yr.i_manufact_id = prev_yr.i_manufact_id)) otherCondition=(((cast(cast(sales_cnt as DECIMALV3(17, 2)) as DECIMALV3(23, 8)) / cast(sales_cnt as DECIMALV3(17, 2))) < 0.900000)) build RFs:RF12 i_brand_id->[i_brand_id];RF13 i_class_id->[i_class_id];RF14 i_category_id->[i_category_id];RF15 
i_manufact_id->[i_manufact_id] ---------------filter((curr_yr.d_year = 2002)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF12 RF13 RF14 RF15 ---------------filter((prev_yr.d_year = 2001)) -----------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out deleted file mode 100644 index 473b9fded85715..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query76.out +++ /dev/null @@ -1,40 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_76 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF3 d_date_sk->[cs_sold_date_sk,ss_sold_date_sk,ws_sold_date_sk] -------------------PhysicalProject ---------------------PhysicalUnion -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------PhysicalProject -------------------------------filter(ss_customer_sk IS NULL) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN bucketShuffle] 
hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 ws_item_sk->[i_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter(ws_promo_sk IS NULL) ---------------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 -----------------------PhysicalDistribute[DistributionSpecExecutionAny] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[cs_item_sk] -----------------------------PhysicalProject -------------------------------filter(cs_bill_customer_sk IS NULL) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] -------------------PhysicalProject ---------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out deleted file mode 100644 index 3659671c869dc8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query77.out +++ /dev/null @@ -1,101 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_77 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ss.s_store_sk = sr.s_store_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] 
---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF1 s_store_sk->[sr_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[sr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF5 d_date_sk->[cs_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) -----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject 
---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF4 -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.wp_web_page_sk = wr.wp_web_page_sk)) otherCondition=() -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF9 wp_web_page_sk->[ws_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF8 d_date_sk->[ws_sold_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF8 RF9 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) 
---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] -------------------------PhysicalProject ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecHash] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF7 wp_web_page_sk->[wr_web_page_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[wr_returned_date_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 RF7 -----------------------------------------PhysicalProject -------------------------------------------filter((date_dim.d_date <= '2000-09-09') and (date_dim.d_date >= '2000-08-10')) ---------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out deleted file mode 100644 index 0663ee2198a5fe..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query78.out +++ /dev/null @@ -1,57 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_78 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter(OR[(coalesce(ws_qty, 0) > 0),(coalesce(cs_qty, 0) > 0)]) -------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((cs.cs_customer_sk = ss.ss_customer_sk) and (cs.cs_item_sk = ss.ss_item_sk) and (cs.cs_sold_year = ss.ss_sold_year)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[LEFT_OUTER_JOIN colocated] hashCondition=((ws.ws_customer_sk = ss.ss_customer_sk) and (ws.ws_item_sk = ss.ss_item_sk) and (ws.ws_sold_year = ss.ss_sold_year)) otherCondition=() -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((store_returns.sr_ticket_number = store_sales.ss_ticket_number) and (store_sales.ss_item_sk = store_returns.sr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] 
-------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((web_returns.wr_order_number = web_sales.ws_order_number) and (web_sales.ws_item_sk = web_returns.wr_item_sk)) otherCondition=() -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[web_returns] -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_year = 1998)) -----------------------------------PhysicalOlapScan[date_dim] ---------------PhysicalProject -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------hashJoin[LEFT_ANTI_JOIN colocated] hashCondition=((catalog_returns.cr_order_number = catalog_sales.cs_order_number) and (catalog_sales.cs_item_sk = catalog_returns.cr_item_sk)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_returns] ---------------------------PhysicalProject -----------------------------filter((date_dim.d_year = 1998)) 
-------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out deleted file mode 100644 index e3d8f3af326d91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query79.out +++ /dev/null @@ -1,32 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_79 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((ms.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ss_customer_sk] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((date_dim.d_dow = 1) and d_year IN (2000, 2001, 2002)) -------------------------------------PhysicalOlapScan[date_dim] 
-----------------------------PhysicalProject -------------------------------filter(OR[(household_demographics.hd_dep_count = 7),(household_demographics.hd_vehicle_count > -1)]) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_number_employees <= 295) and (store.s_number_employees >= 200)) -----------------------------PhysicalOlapScan[store] -------------PhysicalProject ---------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out deleted file mode 100644 index a254eeae049f91..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query8.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_8 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_substring(s_zip, 1, 2) = expr_substring(ca_zip, 1, 2))) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_qoy = 2) and (date_dim.d_year = 1998)) -------------------------------PhysicalOlapScan[date_dim] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store] -------------------PhysicalProject ---------------------PhysicalIntersect -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter((cnt > 10)) -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[c_current_addr_sk] ---------------------------------------PhysicalProject -----------------------------------------filter((customer.c_preferred_cust_flag = 'Y')) -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 ---------------------------------------PhysicalProject -----------------------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', 
'22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', '25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', 
'69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', '73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -------------------------------------------PhysicalOlapScan[customer_address] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------PhysicalProject ---------------------------filter(substring(ca_zip, 1, 5) IN ('10298', '10374', '10425', '11340', '11489', '11618', '11652', '11686', '11855', '11912', '12197', '12318', '12320', '12350', '13086', '13123', '13261', '13338', '13376', '13378', '13443', '13844', '13869', '13918', '14073', '14155', '14196', '14242', '14312', '14440', '14530', '14851', '15371', '15475', '15543', '15734', '15751', '15782', '15794', '16005', '16226', '16364', '16515', '16704', '16791', '16891', '17167', '17193', '17291', '17672', '17819', '17879', '17895', '18218', '18360', '18367', '18410', '18421', '18434', '18569', '18700', '18767', '18829', '18884', '19326', '19444', '19489', '19753', '19833', '19988', '20244', '20317', '20534', '20601', '20712', '21060', '21094', '21204', '21231', '21343', '21727', '21800', '21814', '22728', '22815', '22911', '23065', '23952', '24227', '24255', '24286', '24594', '24660', '24891', '24987', '25115', '25178', '25214', '25264', 
'25333', '25494', '25717', '25973', '26217', '26689', '27052', '27116', '27156', '27287', '27369', '27385', '27413', '27642', '27700', '28055', '28239', '28571', '28577', '28810', '29086', '29392', '29450', '29752', '29818', '30106', '30415', '30621', '31013', '31016', '31655', '31830', '32489', '32669', '32754', '32919', '32958', '32961', '33113', '33122', '33159', '33467', '33562', '33773', '33869', '34306', '34473', '34594', '34948', '34972', '35076', '35390', '35834', '35863', '35926', '36201', '36335', '36430', '36479', '37119', '37788', '37914', '38353', '38607', '38919', '39214', '39459', '39500', '39503', '40146', '40936', '40979', '41162', '41232', '41255', '41331', '41351', '41352', '41419', '41807', '41836', '41967', '42361', '43432', '43639', '43830', '43933', '44529', '45266', '45484', '45533', '45645', '45676', '45859', '46081', '46131', '46507', '47289', '47369', '47529', '47602', '47770', '48017', '48162', '48333', '48530', '48567', '49101', '49130', '49140', '49211', '49230', '49254', '49472', '50412', '50632', '50636', '50679', '50788', '51089', '51184', '51195', '51634', '51717', '51766', '51782', '51793', '51933', '52094', '52301', '52389', '52868', '53163', '53535', '53565', '54010', '54207', '54364', '54558', '54585', '55233', '55349', '56224', '56355', '56436', '56455', '56600', '56877', '57025', '57553', '57631', '57649', '57839', '58032', '58058', '58062', '58117', '58218', '58412', '58454', '58581', '59004', '59080', '59130', '59226', '59345', '59386', '59494', '59852', '60083', '60298', '60560', '60624', '60736', '61527', '61794', '61860', '61997', '62361', '62585', '62878', '63073', '63180', '63193', '63294', '63792', '63991', '64592', '65148', '65177', '65501', '66057', '66943', '67881', '67975', '67998', '68101', '68293', '68341', '68605', '68730', '68770', '68843', '68852', '68908', '69280', '69952', '69998', '70041', '70070', '70073', '70450', '71144', '71256', '71286', '71836', '71948', '71954', '71997', '72592', '72991', '73021', 
'73108', '73134', '73146', '73219', '73873', '74686', '75660', '75675', '75742', '75752', '77454', '77817', '78093', '78366', '79077', '79658', '80332', '80846', '81003', '81070', '81084', '81335', '81504', '81755', '81963', '82080', '82602', '82620', '83041', '83086', '83583', '83647', '83833', '83910', '83986', '84247', '84680', '84844', '84919', '85066', '85761', '86057', '86379', '86709', '88086', '88137', '88217', '89193', '89338', '90209', '90229', '90669', '91110', '91894', '92292', '92380', '92645', '92696', '93498', '94791', '94835', '94898', '95042', '95430', '95464', '95694', '96435', '96560', '97173', '97462', '98069', '98072', '98338', '98533', '98569', '98584', '98862', '99060', '99132')) -----------------------------PhysicalOlapScan[customer_address] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out deleted file mode 100644 index e33fb4f9e86ba9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query80.out +++ /dev/null @@ -1,100 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_80 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalRepeat -------------------PhysicalUnion ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((store_sales.ss_item_sk = store_returns.sr_item_sk) and (store_sales.ss_ticket_number = store_returns.sr_ticket_number)) otherCondition=() build RFs:RF4 ss_item_sk->[sr_item_sk];RF5 ss_ticket_number->[sr_ticket_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF4 RF5 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF3 s_store_sk->[ss_store_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF1 p_promo_sk->[ss_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) 
otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[store] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_catalog_page_sk = catalog_page.cp_catalog_page_sk)) otherCondition=() build RFs:RF11 cp_catalog_page_sk->[cs_catalog_page_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((catalog_sales.cs_item_sk = catalog_returns.cr_item_sk) and (catalog_sales.cs_order_number = catalog_returns.cr_order_number)) otherCondition=() build RFs:RF9 cs_item_sk->[cr_item_sk];RF10 cs_order_number->[cr_order_number] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[catalog_returns] apply 
RFs: RF9 RF10 -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF8 i_item_sk->[cs_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF7 p_promo_sk->[cs_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cs_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF6 RF7 RF8 RF11 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject -----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_page] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] 
-----------------------------PhysicalProject -------------------------------hashJoin[RIGHT_OUTER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF16 ws_item_sk->[wr_item_sk];RF17 ws_order_number->[wr_order_number] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_returns] apply RFs: RF16 RF17 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF15 web_site_sk->[ws_web_site_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF14 i_item_sk->[ws_item_sk] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_promo_sk = promotion.p_promo_sk)) otherCondition=() build RFs:RF13 p_promo_sk->[ws_promo_sk] ---------------------------------------------PhysicalProject -----------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF12 d_date_sk->[ws_sold_date_sk] -------------------------------------------------PhysicalProject ---------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF12 RF13 RF14 RF15 -------------------------------------------------PhysicalProject ---------------------------------------------------filter((date_dim.d_date <= '2002-09-13') and (date_dim.d_date >= '2002-08-14')) -----------------------------------------------------PhysicalOlapScan[date_dim] ---------------------------------------------PhysicalProject 
-----------------------------------------------filter((promotion.p_channel_tv = 'N')) -------------------------------------------------PhysicalOlapScan[promotion] -----------------------------------------PhysicalProject -------------------------------------------filter((item.i_current_price > 50.00)) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out deleted file mode 100644 index b3b6627dd0716d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query81.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_81 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecHash] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_returning_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[cr_returning_addr_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cr_returned_date_sk] ---------------------PhysicalProject -----------------------PhysicalOlapScan[catalog_returns] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((date_dim.d_year = 2001)) -------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_address] ---PhysicalResultSink -----PhysicalTopN[MERGE_SORT] 
-------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((ctr1.ctr_state = ctr2.ctr_state)) otherCondition=((cast(ctr_total_return as DOUBLE) > cast((avg(cast(ctr_total_return as DECIMALV3(38, 4))) * 1.2) as DOUBLE))) build RFs:RF4 ctr_state->[ctr_state] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffle] hashCondition=((ctr1.ctr_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[ctr_customer_sk] -------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF3 RF4 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 ca_address_sk->[c_current_addr_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF2 -----------------------PhysicalProject -------------------------filter((customer_address.ca_state = 'TN')) ---------------------------PhysicalOlapScan[customer_address] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalDistribute[DistributionSpecExecutionAny] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out deleted file mode 100644 index a1bb3a33e1d0ff..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query82.out +++ /dev/null @@ -1,27 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_82 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF2 i_item_sk->[ss_item_sk] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = inventory.inv_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[inv_date_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((inventory.inv_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[inv_item_sk] ---------------------------PhysicalProject -----------------------------filter((inventory.inv_quantity_on_hand <= 500) and (inventory.inv_quantity_on_hand >= 100)) -------------------------------PhysicalOlapScan[inventory] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((item.i_current_price <= 88.00) and (item.i_current_price >= 58.00) and i_manufact_id IN (259, 485, 559, 580)) -------------------------------PhysicalOlapScan[item] -----------------------PhysicalProject -------------------------filter((date_dim.d_date <= '2001-03-14') and (date_dim.d_date >= '2001-01-13')) ---------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out deleted file mode 100644 index 24a4ca5fa16b24..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query83.out +++ /dev/null @@ 
-1,80 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_83 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = wr_items.item_id)) otherCondition=() build RFs:RF13 item_id->[i_item_id,i_item_id] -------------PhysicalProject ---------------hashJoin[INNER_JOIN colocated] hashCondition=((sr_items.item_id = cr_items.item_id)) otherCondition=() build RFs:RF12 item_id->[i_item_id] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF11 i_item_sk->[sr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF10 d_date_sk->[sr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_returns] apply RFs: RF10 RF11 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF9 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF9 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF8 d_week_seq->[d_week_seq] 
-----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF8 -----------------------------------------PhysicalProject -------------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF12 RF13 -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((catalog_returns.cr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF7 i_item_sk->[cr_item_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF6 d_date_sk->[cr_returned_date_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF6 RF7 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF5 d_date->[d_date] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF5 -------------------------------------PhysicalProject ---------------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF4 d_week_seq->[d_week_seq] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[date_dim] apply 
RFs: RF4 -----------------------------------------PhysicalProject -------------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) ---------------------------------------------PhysicalOlapScan[date_dim] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[item] apply RFs: RF13 -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecHash] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((web_returns.wr_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF3 i_item_sk->[wr_item_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_returns.wr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[wr_returned_date_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_returns] apply RFs: RF2 RF3 -----------------------------PhysicalProject -------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_date = date_dim.d_date)) otherCondition=() build RFs:RF1 d_date->[d_date] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[date_dim] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[LEFT_SEMI_JOIN broadcast] hashCondition=((date_dim.d_week_seq = date_dim.d_week_seq)) otherCondition=() build RFs:RF0 d_week_seq->[d_week_seq] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[date_dim] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter(d_date IN ('2001-07-13', '2001-09-10', '2001-11-16')) 
-----------------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out deleted file mode 100644 index 050e21b33ef3c1..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query84.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_84 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_cdemo_sk = customer_demographics.cd_demo_sk)) otherCondition=() build RFs:RF4 cd_demo_sk->[sr_cdemo_sk] -------------PhysicalProject ---------------PhysicalOlapScan[store_returns] apply RFs: RF4 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF3 c_current_cdemo_sk->[cd_demo_sk] -----------------PhysicalProject -------------------PhysicalOlapScan[customer_demographics] apply RFs: RF3 -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF2 hd_demo_sk->[c_current_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_current_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[c_current_addr_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------------PhysicalProject 
---------------------------filter((customer_address.ca_city = 'Woodland')) -----------------------------PhysicalOlapScan[customer_address] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((income_band.ib_income_band_sk = household_demographics.hd_income_band_sk)) otherCondition=() build RFs:RF0 ib_income_band_sk->[hd_income_band_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[household_demographics] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((cast(ib_upper_bound as BIGINT) <= 110306) and (income_band.ib_lower_bound >= 60306)) -----------------------------PhysicalOlapScan[income_band] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out deleted file mode 100644 index 6f4f258704f239..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query85.out +++ /dev/null @@ -1,46 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_85 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((reason.r_reason_sk = web_returns.wr_reason_sk)) otherCondition=() build RFs:RF9 r_reason_sk->[wr_reason_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_education_status = cd2.cd_education_status) and (cd1.cd_marital_status = cd2.cd_marital_status) and (cd2.cd_demo_sk = web_returns.wr_returning_cdemo_sk)) otherCondition=() build RFs:RF6 wr_returning_cdemo_sk->[cd_demo_sk];RF7 cd_marital_status->[cd_marital_status];RF8 cd_education_status->[cd_education_status] -------------------------PhysicalProject ---------------------------filter(cd_education_status IN ('Advanced Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'S', 'U')) -----------------------------PhysicalOlapScan[customer_demographics] apply RFs: RF6 RF7 RF8 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = web_returns.wr_refunded_addr_sk)) otherCondition=(OR[AND[ca_state IN ('IA', 'NC', 'TX'),(web_sales.ws_net_profit >= 100.00),(web_sales.ws_net_profit <= 200.00)],AND[ca_state IN ('GA', 'WI', 'WV'),(web_sales.ws_net_profit >= 150.00)],AND[ca_state IN ('KY', 'OK', 'VA'),(web_sales.ws_net_profit <= 250.00)]]) build RFs:RF4 wr_refunded_addr_sk->[ca_address_sk] 
---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_country = 'United States') and ca_state IN ('GA', 'IA', 'KY', 'NC', 'OK', 'TX', 'VA', 'WI', 'WV')) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((cd1.cd_demo_sk = web_returns.wr_refunded_cdemo_sk)) otherCondition=(OR[AND[(cd1.cd_marital_status = 'D'),(cd1.cd_education_status = 'Primary'),(web_sales.ws_sales_price >= 100.00),(web_sales.ws_sales_price <= 150.00)],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'College'),(web_sales.ws_sales_price <= 100.00)],AND[(cd1.cd_marital_status = 'U'),(cd1.cd_education_status = 'Advanced Degree'),(web_sales.ws_sales_price >= 150.00)]]) build RFs:RF3 cd_demo_sk->[wr_refunded_cdemo_sk] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((web_sales.ws_item_sk = web_returns.wr_item_sk) and (web_sales.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF1 ws_item_sk->[wr_item_sk];RF2 ws_order_number->[wr_order_number] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[web_returns] apply RFs: RF1 RF2 RF3 RF9 -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] ---------------------------------------------PhysicalProject -----------------------------------------------filter((web_sales.ws_net_profit <= 300.00) and (web_sales.ws_net_profit >= 50.00) and (web_sales.ws_sales_price <= 200.00) and (web_sales.ws_sales_price >= 50.00)) 
-------------------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF5 ---------------------------------------------PhysicalProject -----------------------------------------------filter((date_dim.d_year = 1998)) -------------------------------------------------PhysicalOlapScan[date_dim] -------------------------------------PhysicalProject ---------------------------------------filter(OR[AND[(cd1.cd_marital_status = 'D'),(cd1.cd_education_status = 'Primary')],AND[(cd1.cd_marital_status = 'S'),(cd1.cd_education_status = 'College')],AND[(cd1.cd_marital_status = 'U'),(cd1.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'College', 'Primary') and cd_marital_status IN ('D', 'S', 'U')) -----------------------------------------PhysicalOlapScan[customer_demographics] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_page] ---------------------PhysicalProject -----------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out deleted file mode 100644 index 24ed7d94f66e63..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query86.out +++ /dev/null @@ -1,28 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_86 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalRepeat -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ws_item_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((d1.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ws_sold_date_sk] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -------------------------------------PhysicalProject ---------------------------------------filter((d1.d_month_seq <= 1197) and (d1.d_month_seq >= 1186)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out deleted file mode 100644 index d21bcaff8bef43..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query87.out +++ /dev/null @@ -1,48 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_87 -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------PhysicalExcept -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF1 c_customer_sk->[ss_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cs_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[cs_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF2 RF3 ---------------------------PhysicalProject 
-----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_sales.ws_bill_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF5 c_customer_sk->[ws_bill_customer_sk] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[ws_sold_date_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((date_dim.d_month_seq <= 1213) and (date_dim.d_month_seq >= 1202)) -------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out deleted file mode 100644 index 5da04ad61d3f42..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query88.out +++ /dev/null @@ -1,171 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_88 -- -PhysicalResultSink ---NestedLoopJoin[CROSS_JOIN] -----NestedLoopJoin[CROSS_JOIN] -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF23 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF22 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF21 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF21 RF22 RF23 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) 
-----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject -----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF20 s_store_sk->[ss_store_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF19 hd_demo_sk->[ss_hdemo_sk] -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF18 t_time_sk->[ss_sold_time_sk] -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF18 RF19 RF20 -----------------------------------PhysicalProject -------------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute < 30)) ---------------------------------------PhysicalOlapScan[time_dim] -------------------------------PhysicalProject ---------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------------------PhysicalOlapScan[household_demographics] ---------------------------PhysicalProject 
-----------------------------filter((store.s_store_name = 'ese')) -------------------------------PhysicalOlapScan[store] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF17 s_store_sk->[ss_store_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF16 hd_demo_sk->[ss_hdemo_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF15 t_time_sk->[ss_sold_time_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store_sales] apply RFs: RF15 RF16 RF17 ---------------------------------PhysicalProject -----------------------------------filter((time_dim.t_hour = 9) and (time_dim.t_minute >= 30)) -------------------------------------PhysicalOlapScan[time_dim] -----------------------------PhysicalProject -------------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) ---------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((store.s_store_name = 'ese')) -----------------------------PhysicalOlapScan[store] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] 
-----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF14 s_store_sk->[ss_store_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF13 hd_demo_sk->[ss_hdemo_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF12 t_time_sk->[ss_sold_time_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF12 RF13 RF14 -------------------------------PhysicalProject ---------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute < 30)) -----------------------------------PhysicalOlapScan[time_dim] ---------------------------PhysicalProject -----------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -------------------------------PhysicalOlapScan[household_demographics] -----------------------PhysicalProject -------------------------filter((store.s_store_name = 'ese')) ---------------------------PhysicalOlapScan[store] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF11 s_store_sk->[ss_store_sk] 
---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF10 hd_demo_sk->[ss_hdemo_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF9 t_time_sk->[ss_sold_time_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[store_sales] apply RFs: RF9 RF10 RF11 -----------------------------PhysicalProject -------------------------------filter((time_dim.t_hour = 10) and (time_dim.t_minute >= 30)) ---------------------------------PhysicalOlapScan[time_dim] -------------------------PhysicalProject ---------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------------PhysicalOlapScan[household_demographics] ---------------------PhysicalProject -----------------------filter((store.s_store_name = 'ese')) -------------------------PhysicalOlapScan[store] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF8 s_store_sk->[ss_store_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF7 hd_demo_sk->[ss_hdemo_sk] -----------------------PhysicalProject 
-------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF6 t_time_sk->[ss_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[store_sales] apply RFs: RF6 RF7 RF8 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute < 30)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((store.s_store_name = 'ese')) -----------------------PhysicalOlapScan[store] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF5 s_store_sk->[ss_store_sk] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ss_hdemo_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ss_sold_time_sk] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[store_sales] apply RFs: RF3 RF4 RF5 -------------------------PhysicalProject 
---------------------------filter((time_dim.t_hour = 11) and (time_dim.t_minute >= 30)) -----------------------------PhysicalOlapScan[time_dim] ---------------------PhysicalProject -----------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = -1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -------------------------PhysicalOlapScan[household_demographics] -----------------PhysicalProject -------------------filter((store.s_store_name = 'ese')) ---------------------PhysicalOlapScan[store] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 12) and (time_dim.t_minute < 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_vehicle_count <= 5) and OR[AND[(household_demographics.hd_dep_count = 0),(household_demographics.hd_vehicle_count <= 2)],AND[(household_demographics.hd_dep_count = 
-1),(household_demographics.hd_vehicle_count <= 1)],(household_demographics.hd_dep_count = 3)] and hd_dep_count IN (-1, 0, 3)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out deleted file mode 100644 index b8751687a0ff29..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query89.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_89 -- -PhysicalResultSink ---PhysicalProject -----PhysicalTopN[MERGE_SORT] -------PhysicalDistribute[DistributionSpecGather] ---------PhysicalTopN[LOCAL_SORT] -----------PhysicalProject -------------filter((if(( not (avg_monthly_sales = 0.0000)), (cast(abs((sum_sales - cast(avg_monthly_sales as DECIMALV3(38, 2)))) as DECIMALV3(38, 10)) / avg_monthly_sales), NULL) > 0.100000)) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ss_item_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[i_category IN ('Books', 'Children', 'Electronics'),i_class IN ('audio', 'history', 'school-uniforms')],AND[i_category IN ('Men', 'Shoes', 'Sports'),i_class IN ('pants', 'tennis', 'womens')]] and i_category IN ('Books', 'Children', 'Electronics', 'Men', 'Shoes', 'Sports') and i_class IN ('audio', 'history', 'pants', 'school-uniforms', 'tennis', 'womens')) ---------------------------------------------PhysicalOlapScan[item] -------------------------------------PhysicalProject ---------------------------------------filter((date_dim.d_year = 2001)) -----------------------------------------PhysicalOlapScan[date_dim] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out deleted file mode 100644 index 06cd8f92785e08..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query9.out +++ /dev/null @@ -1,115 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_9 -- -PhysicalResultSink ---PhysicalDistribute[DistributionSpecGather] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------NestedLoopJoin[CROSS_JOIN] -----------NestedLoopJoin[CROSS_JOIN] -------------NestedLoopJoin[CROSS_JOIN] ---------------NestedLoopJoin[CROSS_JOIN] -----------------NestedLoopJoin[CROSS_JOIN] -------------------NestedLoopJoin[CROSS_JOIN] ---------------------NestedLoopJoin[CROSS_JOIN] -----------------------NestedLoopJoin[CROSS_JOIN] -------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------NestedLoopJoin[CROSS_JOIN] -------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------NestedLoopJoin[CROSS_JOIN] -----------------------------------PhysicalProject -------------------------------------NestedLoopJoin[CROSS_JOIN] ---------------------------------------PhysicalProject -----------------------------------------filter((reason.r_reason_sk = 1)) -------------------------------------------PhysicalOlapScan[reason] ---------------------------------------hashAgg[GLOBAL] -----------------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------------hashAgg[LOCAL] ---------------------------------------------PhysicalProject -----------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------------PhysicalOlapScan[store_sales] -----------------------------------hashAgg[GLOBAL] -------------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------------hashAgg[LOCAL] -----------------------------------------PhysicalProject -------------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) 
---------------------------------------------PhysicalOlapScan[store_sales] ---------------------------------hashAgg[GLOBAL] -----------------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------------hashAgg[LOCAL] ---------------------------------------PhysicalProject -----------------------------------------filter((store_sales.ss_quantity <= 20) and (store_sales.ss_quantity >= 1)) -------------------------------------------PhysicalOlapScan[store_sales] -------------------------------hashAgg[GLOBAL] ---------------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------------hashAgg[LOCAL] -------------------------------------PhysicalProject ---------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -----------------------------------------PhysicalOlapScan[store_sales] -----------------------------hashAgg[GLOBAL] -------------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------------hashAgg[LOCAL] -----------------------------------PhysicalProject -------------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) ---------------------------------------PhysicalOlapScan[store_sales] ---------------------------hashAgg[GLOBAL] -----------------------------PhysicalDistribute[DistributionSpecGather] -------------------------------hashAgg[LOCAL] ---------------------------------PhysicalProject -----------------------------------filter((store_sales.ss_quantity <= 40) and (store_sales.ss_quantity >= 21)) -------------------------------------PhysicalOlapScan[store_sales] -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecGather] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((store_sales.ss_quantity <= 60) and 
(store_sales.ss_quantity >= 41)) -----------------------------------PhysicalOlapScan[store_sales] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) ---------------------------------PhysicalOlapScan[store_sales] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecGather] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((store_sales.ss_quantity <= 60) and (store_sales.ss_quantity >= 41)) -------------------------------PhysicalOlapScan[store_sales] -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecGather] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -----------------------------PhysicalOlapScan[store_sales] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) ---------------------------PhysicalOlapScan[store_sales] ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------filter((store_sales.ss_quantity <= 80) and (store_sales.ss_quantity >= 61)) -------------------------PhysicalOlapScan[store_sales] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject 
---------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -----------------------PhysicalOlapScan[store_sales] -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecGather] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) ---------------------PhysicalOlapScan[store_sales] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((store_sales.ss_quantity <= 100) and (store_sales.ss_quantity >= 81)) -------------------PhysicalOlapScan[store_sales] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out deleted file mode 100644 index e5f91ba2a61448..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query90.out +++ /dev/null @@ -1,47 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_90 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----PhysicalProject -------NestedLoopJoin[CROSS_JOIN] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF5 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF4 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF3 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF3 RF4 RF5 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 13) and (time_dim.t_hour >= 12)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 6)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecGather] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_web_page_sk = web_page.wp_web_page_sk)) otherCondition=() build RFs:RF2 wp_web_page_sk->[ws_web_page_sk] -------------------PhysicalProject 
---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_ship_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ws_ship_hdemo_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((web_sales.ws_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ws_sold_time_sk] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------PhysicalProject -----------------------------filter((time_dim.t_hour <= 15) and (time_dim.t_hour >= 14)) -------------------------------PhysicalOlapScan[time_dim] -----------------------PhysicalProject -------------------------filter((household_demographics.hd_dep_count = 6)) ---------------------------PhysicalOlapScan[household_demographics] -------------------PhysicalProject ---------------------filter((web_page.wp_char_count <= 5200) and (web_page.wp_char_count >= 5000)) -----------------------PhysicalOlapScan[web_page] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out deleted file mode 100644 index 9d3c77acb23ca8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query91.out +++ /dev/null @@ -1,41 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_91 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF5 cc_call_center_sk->[cr_call_center_sk] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returned_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF4 d_date_sk->[cr_returned_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_returns.cr_returning_customer_sk = customer.c_customer_sk)) otherCondition=() build RFs:RF3 c_customer_sk->[cr_returning_customer_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[catalog_returns] apply RFs: RF3 RF4 RF5 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer_address.ca_address_sk = customer.c_current_addr_sk)) otherCondition=() build RFs:RF2 c_current_addr_sk->[ca_address_sk] ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_gmt_offset = -7.00)) -------------------------------------PhysicalOlapScan[customer_address] apply RFs: RF2 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((household_demographics.hd_demo_sk = customer.c_current_hdemo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[c_current_hdemo_sk] -------------------------------------PhysicalProject 
---------------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer_demographics.cd_demo_sk = customer.c_current_cdemo_sk)) otherCondition=() build RFs:RF0 cd_demo_sk->[c_current_cdemo_sk] -----------------------------------------PhysicalProject -------------------------------------------PhysicalOlapScan[customer] apply RFs: RF0 RF1 -----------------------------------------PhysicalProject -------------------------------------------filter(OR[AND[(customer_demographics.cd_marital_status = 'M'),(customer_demographics.cd_education_status = 'Unknown')],AND[(customer_demographics.cd_marital_status = 'W'),(customer_demographics.cd_education_status = 'Advanced Degree')]] and cd_education_status IN ('Advanced Degree', 'Unknown') and cd_marital_status IN ('M', 'W')) ---------------------------------------------PhysicalOlapScan[customer_demographics] -------------------------------------PhysicalProject ---------------------------------------filter((hd_buy_potential like 'Unknown%')) -----------------------------------------PhysicalOlapScan[household_demographics] -------------------------PhysicalProject ---------------------------filter((date_dim.d_moy = 12) and (date_dim.d_year = 2000)) -----------------------------PhysicalOlapScan[date_dim] ---------------------PhysicalProject -----------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out deleted file mode 100644 index 39a6db24528f22..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query92.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_92 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(ws_ext_discount_amt as DECIMALV3(38, 5)) > (1.3 * avg(cast(ws_ext_discount_amt as DECIMALV3(9, 4))) OVER(PARTITION BY i_item_sk)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((date_dim.d_date_sk = web_sales.ws_sold_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_sold_date_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((item.i_item_sk = web_sales.ws_item_sk)) otherCondition=() build RFs:RF0 i_item_sk->[ws_item_sk] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 -----------------------------PhysicalProject -------------------------------filter((item.i_manufact_id = 714)) ---------------------------------PhysicalOlapScan[item] -------------------------PhysicalProject ---------------------------filter((date_dim.d_date <= '2000-05-01') and (date_dim.d_date >= '2000-02-01')) -----------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out deleted file mode 100644 index 5f2b776e674990..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query93.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_93 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((store_returns.sr_item_sk = store_sales.ss_item_sk) and (store_returns.sr_ticket_number = store_sales.ss_ticket_number)) otherCondition=() build RFs:RF1 sr_item_sk->[ss_item_sk];RF2 sr_ticket_number->[ss_ticket_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[store_sales] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_returns.sr_reason_sk = reason.r_reason_sk)) otherCondition=() build RFs:RF0 r_reason_sk->[sr_reason_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_returns] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((reason.r_reason_desc = 'reason 58')) ---------------------------PhysicalOlapScan[reason] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out deleted file mode 100644 index 0f35f2dc29e44e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query94.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_94 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[DISTINCT_GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[DISTINCT_LOCAL] -----------hashAgg[GLOBAL] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF4 ws_order_number->[ws_order_number] -------------------PhysicalProject ---------------------PhysicalOlapScan[web_sales] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN shuffle] hashCondition=((ws1.ws_order_number = wr1.wr_order_number)) otherCondition=() build RFs:RF3 ws_order_number->[wr_order_number] ---------------------PhysicalProject -----------------------PhysicalOlapScan[web_returns] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF2 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF0 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF0 RF1 RF2 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'OK')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject 
-------------------------------filter((date_dim.d_date <= '2002-06-30') and (date_dim.d_date >= '2002-05-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out deleted file mode 100644 index 21c6fa60d37b75..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query95.out +++ /dev/null @@ -1,43 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_95 -- -PhysicalCteAnchor ( cteId=CTEId#0 ) ---PhysicalCteProducer ( cteId=CTEId#0 ) -----PhysicalProject -------hashJoin[INNER_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws2.ws_order_number)) otherCondition=(( not (ws_warehouse_sk = ws_warehouse_sk))) build RFs:RF0 ws_order_number->[ws_order_number] ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF0 RF7 ---------PhysicalProject -----------PhysicalOlapScan[web_sales] apply RFs: RF7 ---PhysicalResultSink -----PhysicalTopN[GATHER_SORT] -------hashAgg[DISTINCT_GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[DISTINCT_LOCAL] -------------hashAgg[GLOBAL] ---------------hashAgg[LOCAL] -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((ws1.ws_order_number = web_returns.wr_order_number)) otherCondition=() build RFs:RF6 ws_order_number->[wr_order_number,ws_order_number] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffle] hashCondition=((web_returns.wr_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF5 wr_order_number->[ws_order_number] -----------------------PhysicalCteConsumer ( cteId=CTEId#0 ) apply RFs: RF5 RF6 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[web_returns] apply RFs: RF6 -------------------hashJoin[RIGHT_SEMI_JOIN shuffle] hashCondition=((ws1.ws_order_number = ws_wh.ws_order_number)) otherCondition=() build RFs:RF7 ws_order_number->[ws_order_number,ws_order_number] ---------------------PhysicalCteConsumer ( cteId=CTEId#0 ) ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_web_site_sk = web_site.web_site_sk)) otherCondition=() build RFs:RF3 web_site_sk->[ws_web_site_sk] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF2 d_date_sk->[ws_ship_date_sk] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((ws1.ws_ship_addr_sk = customer_address.ca_address_sk)) otherCondition=() build RFs:RF1 ca_address_sk->[ws_ship_addr_sk] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[web_sales] apply RFs: RF1 RF2 RF3 ---------------------------------PhysicalProject -----------------------------------filter((customer_address.ca_state = 'VA')) -------------------------------------PhysicalOlapScan[customer_address] -----------------------------PhysicalProject -------------------------------filter((date_dim.d_date <= '2001-05-31') and (date_dim.d_date >= '2001-04-01')) ---------------------------------PhysicalOlapScan[date_dim] -------------------------PhysicalProject ---------------------------filter((web_site.web_company_name = 'pri')) -----------------------------PhysicalOlapScan[web_site] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out deleted file mode 100644 index b4b739a9bf444f..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query96.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_96 -- -PhysicalResultSink ---PhysicalTopN[GATHER_SORT] -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_store_sk = store.s_store_sk)) otherCondition=() build RFs:RF2 s_store_sk->[ss_store_sk] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk)) otherCondition=() build RFs:RF1 hd_demo_sk->[ss_hdemo_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_time_sk = time_dim.t_time_sk)) otherCondition=() build RFs:RF0 t_time_sk->[ss_sold_time_sk] -----------------------PhysicalProject -------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 RF2 -----------------------PhysicalProject -------------------------filter((time_dim.t_hour = 8) and (time_dim.t_minute >= 30)) ---------------------------PhysicalOlapScan[time_dim] -------------------PhysicalProject ---------------------filter((household_demographics.hd_dep_count = 0)) -----------------------PhysicalOlapScan[household_demographics] ---------------PhysicalProject -----------------filter((store.s_store_name = 'ese')) -------------------PhysicalOlapScan[store] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out deleted file mode 100644 index d3a845763241f7..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query97.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !ds_shape_97 -- -PhysicalResultSink ---PhysicalLimit[GLOBAL] -----PhysicalLimit[LOCAL] -------hashAgg[GLOBAL] ---------PhysicalDistribute[DistributionSpecGather] -----------hashAgg[LOCAL] -------------PhysicalProject ---------------hashJoin[FULL_OUTER_JOIN colocated] hashCondition=((ssci.customer_sk = csci.customer_sk) and (ssci.item_sk = csci.item_sk)) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF1 d_date_sk->[ss_sold_date_sk] -----------------------------PhysicalProject -------------------------------filter(( not ss_sold_date_sk IS NULL)) ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1210) and (date_dim.d_month_seq >= 1199)) ---------------------------------PhysicalOlapScan[date_dim] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_sold_date_sk] -----------------------------PhysicalProject -------------------------------filter(( not cs_sold_date_sk IS NULL)) ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((date_dim.d_month_seq <= 1210) and (date_dim.d_month_seq >= 1199)) 
---------------------------------PhysicalOlapScan[date_dim] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out deleted file mode 100644 index beb47b1d23d9dd..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query98.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_98 -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------PhysicalWindow -------------PhysicalQuickSort[LOCAL_SORT] ---------------PhysicalDistribute[DistributionSpecHash] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_item_sk = item.i_item_sk)) otherCondition=() build RFs:RF1 i_item_sk->[ss_item_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((store_sales.ss_sold_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[ss_sold_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[store_sales] apply RFs: RF0 RF1 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_date <= '1999-03-07') and (date_dim.d_date >= '1999-02-05')) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------filter(i_category IN ('Jewelry', 'Men', 'Sports')) -------------------------------PhysicalOlapScan[item] - diff --git a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out 
b/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out deleted file mode 100644 index de639b9015342e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpcds_sf1000/shape/query99.out +++ /dev/null @@ -1,29 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !ds_shape_99 -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_call_center_sk = call_center.cc_call_center_sk)) otherCondition=() build RFs:RF3 cc_call_center_sk->[cs_call_center_sk] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_mode_sk = ship_mode.sm_ship_mode_sk)) otherCondition=() build RFs:RF2 sm_ship_mode_sk->[cs_ship_mode_sk] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_warehouse_sk = warehouse.w_warehouse_sk)) otherCondition=() build RFs:RF1 w_warehouse_sk->[cs_warehouse_sk] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((catalog_sales.cs_ship_date_sk = date_dim.d_date_sk)) otherCondition=() build RFs:RF0 d_date_sk->[cs_ship_date_sk] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[catalog_sales] apply RFs: RF0 RF1 RF2 RF3 -------------------------------PhysicalProject ---------------------------------filter((date_dim.d_month_seq <= 1205) and (date_dim.d_month_seq >= 1194)) -----------------------------------PhysicalOlapScan[date_dim] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[warehouse] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[ship_mode] -------------------PhysicalProject ---------------------PhysicalOlapScan[call_center] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out deleted file mode 100644 index a532a3f74c5395..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) -------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] -------------------PhysicalProject ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out deleted file mode 100644 index 6d414f1f56a4f4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((nation.n_name = 'GERMANY')) -------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out deleted file mode 100644 index 19361de35a19df..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out deleted file mode 100644 index 53fedeb6ad34c5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] ---------------PhysicalProject -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out deleted file mode 100644 index 7b04caaf3e087a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((s_comment like '%Customer%Complaints%')) -------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM 
POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out deleted file mode 100644 index 92cc8c93708400..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) ---------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out deleted file mode 100644 index 79164f3a7abb0d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out deleted file mode 100644 index c1a68c315e06b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF3 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((region.r_name = 'EUROPE')) 
-------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out deleted file mode 100644 index 89548468b7c1ae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out deleted file mode 100644 index 7678db3199aef2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < 
'1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out deleted file mode 100644 index c54a6b502f590d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF3 o_orderkey->[l_orderkey,l_orderkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF1 l_orderkey->[l_orderkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 -----------------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF0 l_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------filter((l3.l_receiptdate > l3.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject 
-------------------------filter((orders.o_orderstatus = 'F')) ---------------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((nation.n_name = 'SAUDI ARABIA')) -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - 
diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out deleted file mode 100644 index 48d4e37ec466e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out deleted file mode 100644 index 55a5eab9536c60..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey,s_nationkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------------------PhysicalProject -------------------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) ---------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF4 ---------------------------PhysicalProject 
-----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] apply RFs: RF5 -------------------PhysicalProject ---------------------filter((region.r_name = 'ASIA')) -----------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out deleted file mode 100644 index b98149f8668a1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey) and (supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[c_nationkey];RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF2 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------NestedLoopJoin[INNER_JOIN]OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]] ---------------------PhysicalProject 
-----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out deleted file mode 100644 index 36e6e6b1ee6c61..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF6 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 c_custkey->[o_custkey] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() 
-------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 RF1 ---------------------------------------------PhysicalProject -----------------------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) ---------------------------------------------PhysicalOlapScan[part] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[supplier] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[nation] apply RFs: RF6 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter((region.r_name = 'AMERICA')) -------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out deleted file mode 100644 index b60760ec115acb..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF4 p_partkey->[l_partkey,ps_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF4 -----------------------PhysicalProject -------------------------filter((p_name like '%green%')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() -----------------------PhysicalProject -------------------------PhysicalOlapScan[supplier] 
-----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out deleted file mode 100644 index e47bc37df32324..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q10.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) ---------------------------PhysicalOlapScan[orders] ---------------PhysicalProject -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out deleted file mode 100644 index 40df7553b0352f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF1 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out deleted file mode 100644 index 19361de35a19df..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out deleted file mode 100644 index 6df1a05fa3b57f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 l_partkey->[p_partkey] ---------------PhysicalProject -----------------PhysicalOlapScan[part] apply RFs: RF0 ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out deleted file mode 100644 index 1d3d780435aab3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 supplier_no->[s_suppkey] -------------PhysicalProject ---------------PhysicalOlapScan[supplier] apply RFs: RF0 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out deleted file mode 100644 index f04b0bc766338b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------filter((s_comment like '%Customer%Complaints%')) ---------------------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out deleted file mode 100644 index 850c567ab4aa39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q17.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out deleted file mode 100644 index e4c5acd49e3fb3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out deleted file mode 100644 index fc7b45224906e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q2.out +++ 
/dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF2 p_partkey->[ps_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'EUROPE')) 
-----------------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out deleted file mode 100644 index 6b7a6da490ce49..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out deleted file mode 100644 index 6b3b115fcfee2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] 
-------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out deleted file mode 100644 index 0436a7b245b174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q21.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF4 l_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF3 l_orderkey->[l_orderkey] ---------------------PhysicalProject -----------------------filter((l3.l_receiptdate > l3.l_commitdate)) -------------------------PhysicalOlapScan[lineitem] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] -------------------------PhysicalProject ---------------------------filter((orders.o_orderstatus = 'F')) -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[l_suppkey] -----------------------------PhysicalProject -------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] 
---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((nation.n_name = 'SAUDI ARABIA')) -------------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) 
---------------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out deleted file mode 100644 index 36d395afccaa2d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out deleted file mode 100644 index 2f45b1e87b401d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject 
---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out deleted file mode 100644 index 957b17a7402749..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q7.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) build RFs:RF4 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -----------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[c_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(n_name IN ('FRANCE', 'GERMANY')) ---------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out deleted file mode 100644 index fc46b49054b927..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 l_suppkey->[s_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF5 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF4 c_custkey->[o_custkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] 
---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF2 p_partkey->[l_partkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[c_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out deleted file mode 100644 index 
d70d5886607341..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/rf_prune/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out b/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out deleted file mode 100644 index ba4c37059cb12c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.out +++ /dev/null @@ -1,36 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !rf_setop -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((T.l_linenumber = expr_cast(r_regionkey as BIGINT))) otherCondition=() build RFs:RF0 expr_cast(r_regionkey as BIGINT)->[cast(l_linenumber as BIGINT),o_orderkey] -------------PhysicalExcept ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------PhysicalOlapScan[lineitem] apply RFs: RF0 ---------------PhysicalDistribute[DistributionSpecHash] -----------------PhysicalProject -------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------PhysicalProject ---------------PhysicalOlapScan[region] - --- !rf_setop_expr -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((expr_abs(l_linenumber) = expr_cast(r_regionkey as LARGEINT))) otherCondition=() build RFs:RF0 expr_cast(r_regionkey as LARGEINT)->[abs(cast(l_linenumber as BIGINT)),abs(o_orderkey)] -------------PhysicalProject ---------------PhysicalExcept -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject 
---------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------PhysicalProject ---------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out deleted file mode 100644 index c13794f49a1c6e..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q10.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[c_nationkey] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 RF2 -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) ---------------------------PhysicalOlapScan[orders] ---------------PhysicalProject -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out deleted file mode 100644 index 40df7553b0352f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF3 ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF1 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------------PhysicalProject -------------------------------filter((nation.n_name = 'GERMANY')) ---------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out deleted file mode 100644 index 5d94f6d22b76d8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out deleted file mode 100644 index 6df1a05fa3b57f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 l_partkey->[p_partkey] ---------------PhysicalProject -----------------PhysicalOlapScan[part] apply RFs: RF0 ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out deleted file mode 100644 index 1d3d780435aab3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 supplier_no->[s_suppkey] -------------PhysicalProject ---------------PhysicalOlapScan[supplier] apply RFs: RF0 -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) 
-----------------------------PhysicalOlapScan[lineitem] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecGather] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------hashAgg[GLOBAL] ---------------------------PhysicalDistribute[DistributionSpecHash] -----------------------------hashAgg[LOCAL] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out deleted file mode 100644 index f04b0bc766338b..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() -----------------PhysicalProject -------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------filter((s_comment like 
'%Customer%Complaints%')) ---------------------PhysicalOlapScan[supplier] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out deleted file mode 100644 index 850c567ab4aa39..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q17.out +++ /dev/null @@ -1,20 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------PhysicalProject ---------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) -----------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out deleted file mode 100644 index e4c5acd49e3fb3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 o_custkey->[c_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] apply RFs: RF1 -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out deleted file mode 100644 index fc7b45224906e4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q2.out +++ /dev/null 
@@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalDistribute[DistributionSpecHash] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[ps_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF2 p_partkey->[ps_partkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------PhysicalOlapScan[part] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalOlapScan[supplier] apply RFs: RF1 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'EUROPE')) 
-----------------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out deleted file mode 100644 index 6b7a6da490ce49..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out deleted file mode 100644 index 6b3b115fcfee2f..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF4 s_suppkey->[l_suppkey,ps_suppkey] -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF2 ps_partkey->[l_partkey];RF3 ps_suppkey->[l_suppkey] -----------------hashAgg[GLOBAL] -------------------PhysicalDistribute[DistributionSpecHash] ---------------------hashAgg[LOCAL] -----------------------PhysicalProject -------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 RF4 -----------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF1 p_partkey->[ps_partkey] -------------------PhysicalProject 
---------------------PhysicalOlapScan[partsupp] apply RFs: RF1 RF4 -------------------PhysicalProject ---------------------filter((p_name like 'forest%')) -----------------------PhysicalOlapScan[part] -------------PhysicalProject ---------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------PhysicalProject -------------------filter((nation.n_name = 'CANADA')) ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out deleted file mode 100644 index 0436a7b245b174..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q21.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF4 l_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[lineitem] apply RFs: RF4 -------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF3 l_orderkey->[l_orderkey] ---------------------PhysicalProject -----------------------filter((l3.l_receiptdate > l3.l_commitdate)) -------------------------PhysicalOlapScan[lineitem] apply RFs: RF3 ---------------------PhysicalProject 
-----------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] -------------------------PhysicalProject ---------------------------filter((orders.o_orderstatus = 'F')) -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[l_suppkey] -----------------------------PhysicalProject -------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[supplier] apply RFs: RF0 ---------------------------------PhysicalProject -----------------------------------filter((nation.n_name = 'SAUDI ARABIA')) -------------------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out deleted file mode 100644 index 36d395afccaa2d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out deleted file mode 100644 index f2ebd3c8b31b86..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey) and (customer.c_nationkey = supplier.s_nationkey)) otherCondition=() build RFs:RF4 c_nationkey->[n_nationkey,s_nationkey];RF5 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[orders] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF1 RF4 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------PhysicalProject 
---------------------------------PhysicalOlapScan[nation] apply RFs: RF0 RF4 -------------------------------PhysicalProject ---------------------------------filter((region.r_name = 'ASIA')) -----------------------------------PhysicalOlapScan[region] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out deleted file mode 100644 index 957b17a7402749..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q7.out +++ /dev/null @@ -1,35 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) build RFs:RF4 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF3 l_orderkey->[o_orderkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------------PhysicalProject ---------------------------------filter(n_name IN ('FRANCE', 'GERMANY')) -----------------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] 
hashCondition=((customer.c_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[c_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter(n_name IN ('FRANCE', 'GERMANY')) ---------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out deleted file mode 100644 index ef2b3523bbbe98..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF6 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 l_suppkey->[s_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF5 RF6 -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF4 c_custkey->[o_custkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF3 
l_orderkey->[o_orderkey] ---------------------------------PhysicalProject -----------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 RF4 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF2 p_partkey->[l_partkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 -------------------------------------PhysicalProject ---------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) -----------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[c_nationkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF1 ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF0 r_regionkey->[n_regionkey] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[nation] apply RFs: RF0 -------------------------------------PhysicalProject ---------------------------------------filter((region.r_name = 'AMERICA')) -----------------------------------------PhysicalOlapScan[region] ---------------------PhysicalProject -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out deleted file mode 100644 index 
cd77e18b02c916..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF4 ps_suppkey->[l_suppkey,s_suppkey];RF5 ps_partkey->[l_partkey,p_partkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------filter((p_name like '%green%')) -----------------------------------PhysicalOlapScan[part] apply RFs: RF5 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) 
otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF0 RF4 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[nation] -------------------PhysicalProject ---------------------PhysicalOlapScan[partsupp] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out deleted file mode 100644 index 22f0777694a7ce..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.out +++ /dev/null @@ -1,13 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate <= '1998-09-02')) -------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out deleted file mode 100644 index 0820ef5c2c6526..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.out +++ /dev/null @@ -1,26 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[c_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -----------------------PhysicalProject -------------------------filter((lineitem.l_returnflag = 'R')) ---------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] ---------------------------PhysicalProject -----------------------------filter((orders.o_orderdate < '1994-01-01') and (orders.o_orderdate >= '1993-10-01')) -------------------------------PhysicalOlapScan[orders] apply RFs: RF0 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[customer] apply RFs: RF2 -------------------PhysicalProject ---------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out deleted file mode 100644 index 6d414f1f56a4f4..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.out +++ /dev/null @@ -1,37 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------NestedLoopJoin[INNER_JOIN](cast(value as DOUBLE) > cast((sum((ps_supplycost * cast(ps_availqty as DECIMALV3(10, 0)))) * 0.000002) as DOUBLE)) -------------PhysicalProject ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[s_nationkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[ps_suppkey] -------------------------PhysicalProject ---------------------------PhysicalOlapScan[partsupp] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[supplier] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((nation.n_name = 'GERMANY')) -------------------------PhysicalOlapScan[nation] -------------PhysicalProject ---------------hashAgg[GLOBAL] -----------------PhysicalDistribute[DistributionSpecGather] -------------------hashAgg[LOCAL] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF1 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 -----------------------------PhysicalProject 
-------------------------------PhysicalOlapScan[supplier] apply RFs: RF1 -------------------------PhysicalProject ---------------------------filter((nation.n_name = 'GERMANY')) -----------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out deleted file mode 100644 index 8df830dd428e58..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.out +++ /dev/null @@ -1,17 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate) and (lineitem.l_receiptdate < '1995-01-01') and (lineitem.l_receiptdate >= '1994-01-01') and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate < lineitem.l_commitdate) and l_shipmode IN ('MAIL', 'SHIP')) -----------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out deleted file mode 100644 index 5d94f6d22b76d8..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashJoin[RIGHT_OUTER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------filter(( not (o_comment like '%special%requests%'))) ---------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out deleted file mode 100644 index d6457aadcccc10..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate < '1995-10-01') and (lineitem.l_shipdate >= '1995-09-01')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF0 ---------------PhysicalProject -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out deleted file mode 100644 index e9b45b5888ce54..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((revenue0.total_revenue = max(total_revenue))) otherCondition=() -------------PhysicalProject ---------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -----------------PhysicalProject -------------------hashAgg[GLOBAL] ---------------------PhysicalDistribute[DistributionSpecHash] -----------------------hashAgg[LOCAL] -------------------------PhysicalProject ---------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -----------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] -------------hashAgg[GLOBAL] ---------------PhysicalDistribute[DistributionSpecGather] -----------------hashAgg[LOCAL] -------------------PhysicalProject ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) -------------------------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out deleted file mode 100644 index 7b04caaf3e087a..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -------------------hashJoin[LEFT_ANTI_JOIN broadcast] hashCondition=((partsupp.ps_suppkey = supplier.s_suppkey)) otherCondition=() ---------------------PhysicalProject -----------------------PhysicalOlapScan[partsupp] apply RFs: RF0 ---------------------PhysicalProject -----------------------filter((s_comment like '%Customer%Complaints%')) -------------------------PhysicalOlapScan[supplier] -------------------PhysicalProject ---------------------filter(( not (p_brand = 'Brand#45')) and ( not (p_type like 'MEDIUM POLISHED%')) and p_size IN (14, 19, 23, 3, 36, 45, 49, 9)) -----------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out deleted file mode 100644 index 92cc8c93708400..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.out +++ /dev/null @@ -1,19 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalProject -----hashAgg[GLOBAL] -------PhysicalDistribute[DistributionSpecGather] ---------hashAgg[LOCAL] -----------PhysicalProject -------------filter((cast(l_quantity as DECIMALV3(38, 5)) < (0.2 * avg(cast(l_quantity as DECIMALV3(17, 4))) OVER(PARTITION BY p_partkey)))) ---------------PhysicalWindow -----------------PhysicalQuickSort[LOCAL_SORT] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF0 p_partkey->[l_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -----------------------PhysicalProject -------------------------filter((part.p_brand = 'Brand#23') and (part.p_container = 'MED BOX')) ---------------------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out deleted file mode 100644 index 44c12faa6d47d5..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.out +++ /dev/null @@ -1,24 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF2 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------PhysicalOlapScan[lineitem] apply RFs: RF2 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF1 c_custkey->[o_custkey] -------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF0 l_orderkey->[o_orderkey] ---------------------PhysicalProject -----------------------PhysicalOlapScan[orders] apply RFs: RF0 RF1 ---------------------PhysicalProject -----------------------filter((sum(l_quantity) > 300.00)) -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[lineitem] -------------------PhysicalProject ---------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out deleted file mode 100644 index 78faf3234691b3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.out +++ /dev/null @@ -1,15 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=(OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(lineitem.l_quantity <= 11.00),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(lineitem.l_quantity >= 10.00),(lineitem.l_quantity <= 20.00),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG'),(lineitem.l_quantity >= 20.00)]]) build RFs:RF0 p_partkey->[l_partkey] -------------PhysicalProject ---------------filter((lineitem.l_quantity <= 30.00) and (lineitem.l_quantity >= 1.00) and (lineitem.l_shipinstruct = 'DELIVER IN PERSON') and l_shipmode IN ('AIR REG', 'AIR')) -----------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------PhysicalProject ---------------filter((part.p_size <= 15) and (part.p_size >= 1) and OR[AND[(part.p_brand = 'Brand#12'),p_container IN ('SM BOX', 'SM CASE', 'SM PACK', 'SM PKG'),(part.p_size <= 5)],AND[(part.p_brand = 'Brand#23'),p_container IN ('MED BAG', 'MED BOX', 'MED PACK', 'MED PKG'),(part.p_size <= 10)],AND[(part.p_brand = 'Brand#34'),p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG')]] and p_brand IN ('Brand#12', 'Brand#23', 'Brand#34') and p_container IN ('LG BOX', 'LG CASE', 'LG PACK', 'LG PKG', 'MED BAG', 'MED BOX', 'MED PACK', 'MED PKG', 'SM BOX', 'SM CASE', 'SM PACK', 'SM PKG')) -----------------PhysicalOlapScan[part] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out deleted file mode 100644 index c1a68c315e06b2..00000000000000 --- 
a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------PhysicalProject -----------filter((partsupp.ps_supplycost = min(ps_supplycost) OVER(PARTITION BY p_partkey))) -------------PhysicalWindow ---------------PhysicalQuickSort[LOCAL_SORT] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF3 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF2 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF1 s_suppkey->[ps_suppkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = partsupp.ps_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF1 ---------------------------------PhysicalProject -----------------------------------filter((p_type like '%BRASS') and (part.p_size = 15)) -------------------------------------PhysicalOlapScan[part] -----------------------------PhysicalOlapScan[supplier] apply RFs: RF2 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] apply RFs: RF3 ---------------------PhysicalProject -----------------------filter((region.r_name = 'EUROPE')) 
-------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out deleted file mode 100644 index 89548468b7c1ae..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.out +++ /dev/null @@ -1,31 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = t3.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((t2.l_partkey = t1.ps_partkey) and (t2.l_suppkey = t1.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > t2.l_q)) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------PhysicalProject -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecHash] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject 
-------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out deleted file mode 100644 index 7678db3199aef2..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.out +++ /dev/null @@ -1,30 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------PhysicalProject ---------------hashJoin[RIGHT_SEMI_JOIN shuffleBucket] hashCondition=((supplier.s_suppkey = partsupp.ps_suppkey)) otherCondition=() build RFs:RF3 s_suppkey->[l_suppkey,ps_suppkey] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = partsupp.ps_partkey) and (lineitem.l_suppkey = partsupp.ps_suppkey)) otherCondition=((cast(ps_availqty as DECIMALV3(38, 3)) > (0.5 * sum(l_quantity)))) build RFs:RF1 ps_partkey->[l_partkey];RF2 ps_suppkey->[l_suppkey] ---------------------hashAgg[GLOBAL] -----------------------PhysicalDistribute[DistributionSpecHash] -------------------------hashAgg[LOCAL] ---------------------------PhysicalProject -----------------------------filter((lineitem.l_shipdate < 
'1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 ---------------------hashJoin[LEFT_SEMI_JOIN colocated] hashCondition=((partsupp.ps_partkey = part.p_partkey)) otherCondition=() build RFs:RF0 p_partkey->[ps_partkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[partsupp] apply RFs: RF0 RF3 -----------------------PhysicalProject -------------------------filter((p_name like 'forest%')) ---------------------------PhysicalOlapScan[part] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] apply RFs: RF4 -------------PhysicalProject ---------------filter((nation.n_name = 'CANADA')) -----------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out deleted file mode 100644 index c54a6b502f590d..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = l1.l_orderkey)) otherCondition=() build RFs:RF3 o_orderkey->[l_orderkey,l_orderkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = l1.l_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey] ---------------------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((l2.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF1 l_orderkey->[l_orderkey] -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF3 -----------------------------hashJoin[RIGHT_ANTI_JOIN colocated] hashCondition=((l3.l_orderkey = l1.l_orderkey)) otherCondition=(( not (l_suppkey = l_suppkey))) build RFs:RF0 l_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------filter((l3.l_receiptdate > l3.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------filter((l1.l_receiptdate > l1.l_commitdate)) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF2 RF3 ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject 
-------------------------filter((orders.o_orderstatus = 'F')) ---------------------------PhysicalOlapScan[orders] -------------------PhysicalProject ---------------------filter((nation.n_name = 'SAUDI ARABIA')) -----------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out deleted file mode 100644 index 63d82280b35b16..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.out +++ /dev/null @@ -1,25 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_ANTI_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------NestedLoopJoin[INNER_JOIN](cast(c_acctbal as DECIMALV3(38, 4)) > avg(cast(c_acctbal as DECIMALV3(17, 4)))) -----------------------PhysicalProject -------------------------filter(substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------PhysicalOlapScan[customer] -----------------------hashAgg[GLOBAL] -------------------------PhysicalDistribute[DistributionSpecGather] ---------------------------hashAgg[LOCAL] -----------------------------PhysicalProject -------------------------------filter((customer.c_acctbal > 0.00) and substring(c_phone, 1, 2) IN ('13', '17', '18', '23', '29', '30', '31')) ---------------------------------PhysicalOlapScan[customer] - diff 
--git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out deleted file mode 100644 index 48d4e37ec466e3..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.out +++ /dev/null @@ -1,21 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalTopN[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalTopN[LOCAL_SORT] ---------hashAgg[LOCAL] -----------PhysicalProject -------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] ---------------PhysicalProject -----------------filter((lineitem.l_shipdate > '1995-03-15')) -------------------PhysicalOlapScan[lineitem] apply RFs: RF1 ---------------PhysicalProject -----------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1995-03-15')) -----------------------PhysicalOlapScan[orders] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((customer.c_mktsegment = 'BUILDING')) -----------------------PhysicalOlapScan[customer] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out deleted file mode 100644 index 19b73f24dc3315..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.out +++ /dev/null @@ -1,18 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[RIGHT_SEMI_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] -------------------PhysicalProject ---------------------filter((lineitem.l_commitdate < lineitem.l_receiptdate)) -----------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------PhysicalProject ---------------------filter((orders.o_orderdate < '1993-10-01') and (orders.o_orderdate >= '1993-07-01')) -----------------------PhysicalOlapScan[orders] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out deleted file mode 100644 index 640dabeb70f0bb..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((nation.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF5 r_regionkey->[n_regionkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey,s_nationkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = supplier.s_nationkey) and (lineitem.l_suppkey = supplier.s_suppkey)) otherCondition=() build RFs:RF2 s_suppkey->[l_suppkey];RF3 s_nationkey->[c_nationkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 -------------------------------PhysicalProject ---------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF0 c_custkey->[o_custkey] -----------------------------------PhysicalProject -------------------------------------filter((orders.o_orderdate < '1995-01-01') and (orders.o_orderdate >= '1994-01-01')) ---------------------------------------PhysicalOlapScan[orders] apply RFs: RF0 -----------------------------------PhysicalProject -------------------------------------PhysicalOlapScan[customer] apply RFs: RF3 RF4 
---------------------------PhysicalProject -----------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] apply RFs: RF5 -------------------PhysicalProject ---------------------filter((region.r_name = 'ASIA')) -----------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out deleted file mode 100644 index f1f764bec09499..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.out +++ /dev/null @@ -1,10 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---hashAgg[GLOBAL] -----PhysicalDistribute[DistributionSpecGather] -------hashAgg[LOCAL] ---------PhysicalProject -----------filter((lineitem.l_discount <= 0.07) and (lineitem.l_discount >= 0.05) and (lineitem.l_quantity < 24.00) and (lineitem.l_shipdate < '1995-01-01') and (lineitem.l_shipdate >= '1994-01-01')) -------------PhysicalOlapScan[lineitem] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out deleted file mode 100644 index b98149f8668a1c..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.out +++ /dev/null @@ -1,34 +0,0 @@ --- This file is automatically generated. 
You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n2.n_nationkey) and (supplier.s_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF3 n_nationkey->[c_nationkey];RF4 n_nationkey->[s_nationkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((customer.c_custkey = orders.o_custkey)) otherCondition=() build RFs:RF2 c_custkey->[o_custkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 l_orderkey->[o_orderkey] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[orders] apply RFs: RF1 RF2 ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF0 s_suppkey->[l_suppkey] -------------------------------PhysicalProject ---------------------------------filter((lineitem.l_shipdate <= '1996-12-31') and (lineitem.l_shipdate >= '1995-01-01')) -----------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[supplier] apply RFs: RF4 -----------------------PhysicalProject -------------------------PhysicalOlapScan[customer] apply RFs: RF3 -------------------NestedLoopJoin[INNER_JOIN]OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]] ---------------------PhysicalProject 
-----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter(n_name IN ('FRANCE', 'GERMANY')) -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out deleted file mode 100644 index f3abaf5956c1a6..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.out +++ /dev/null @@ -1,44 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------PhysicalProject -----------hashAgg[GLOBAL] -------------PhysicalDistribute[DistributionSpecHash] ---------------hashAgg[LOCAL] -----------------PhysicalProject -------------------hashJoin[INNER_JOIN broadcast] hashCondition=((n1.n_regionkey = region.r_regionkey)) otherCondition=() build RFs:RF6 r_regionkey->[n_regionkey] ---------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() build RFs:RF5 n_nationkey->[s_nationkey] -------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((customer.c_nationkey = n1.n_nationkey)) otherCondition=() build RFs:RF4 n_nationkey->[c_nationkey] -----------------------------PhysicalProject -------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() build RFs:RF3 c_custkey->[o_custkey] ---------------------------------PhysicalProject -----------------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() 
build RFs:RF2 s_suppkey->[l_suppkey] -------------------------------------PhysicalProject ---------------------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF1 p_partkey->[l_partkey] -----------------------------------------PhysicalProject -------------------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() build RFs:RF0 o_orderkey->[l_orderkey] ---------------------------------------------PhysicalProject -----------------------------------------------PhysicalOlapScan[lineitem] apply RFs: RF0 RF1 RF2 ---------------------------------------------PhysicalProject -----------------------------------------------filter((orders.o_orderdate <= '1996-12-31') and (orders.o_orderdate >= '1995-01-01')) -------------------------------------------------PhysicalOlapScan[orders] apply RFs: RF3 -----------------------------------------PhysicalProject -------------------------------------------filter((part.p_type = 'ECONOMY ANODIZED STEEL')) ---------------------------------------------PhysicalOlapScan[part] -------------------------------------PhysicalProject ---------------------------------------PhysicalOlapScan[supplier] apply RFs: RF5 ---------------------------------PhysicalProject -----------------------------------PhysicalOlapScan[customer] apply RFs: RF4 -----------------------------PhysicalProject -------------------------------PhysicalOlapScan[nation] apply RFs: RF6 -------------------------PhysicalProject ---------------------------PhysicalOlapScan[nation] ---------------------PhysicalProject -----------------------filter((region.r_name = 'AMERICA')) -------------------------PhysicalOlapScan[region] - diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out b/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out deleted file mode 100644 index 
ab51e5595023a9..00000000000000 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.out +++ /dev/null @@ -1,33 +0,0 @@ --- This file is automatically generated. You should know what you did if you want to edit this --- !select -- -PhysicalResultSink ---PhysicalQuickSort[MERGE_SORT] -----PhysicalDistribute[DistributionSpecGather] -------PhysicalQuickSort[LOCAL_SORT] ---------hashAgg[GLOBAL] -----------PhysicalDistribute[DistributionSpecHash] -------------hashAgg[LOCAL] ---------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF5 s_suppkey->[l_suppkey,ps_suppkey] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN colocated] hashCondition=((part.p_partkey = lineitem.l_partkey)) otherCondition=() build RFs:RF4 p_partkey->[l_partkey,ps_partkey] -----------------------PhysicalProject -------------------------hashJoin[INNER_JOIN shuffleBucket] hashCondition=((partsupp.ps_partkey = lineitem.l_partkey) and (partsupp.ps_suppkey = lineitem.l_suppkey)) otherCondition=() build RFs:RF2 ps_suppkey->[l_suppkey];RF3 ps_partkey->[l_partkey] ---------------------------PhysicalProject -----------------------------hashJoin[INNER_JOIN colocated] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=() build RFs:RF1 o_orderkey->[l_orderkey] -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[lineitem] apply RFs: RF1 RF2 RF3 RF4 RF5 -------------------------------PhysicalProject ---------------------------------PhysicalOlapScan[orders] ---------------------------PhysicalProject -----------------------------PhysicalOlapScan[partsupp] apply RFs: RF4 RF5 -----------------------PhysicalProject -------------------------filter((p_name like '%green%')) ---------------------------PhysicalOlapScan[part] -------------------PhysicalProject ---------------------hashJoin[INNER_JOIN 
broadcast] hashCondition=((supplier.s_nationkey = nation.n_nationkey)) otherCondition=() build RFs:RF0 n_nationkey->[s_nationkey] -----------------------PhysicalProject -------------------------PhysicalOlapScan[supplier] apply RFs: RF0 -----------------------PhysicalProject -------------------------PhysicalOlapScan[nation] - diff --git a/regression-test/data/nereids_clickbench_shape_p0/query1.out b/regression-test/data/shape_check/clickbench/query1.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query1.out rename to regression-test/data/shape_check/clickbench/query1.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query10.out b/regression-test/data/shape_check/clickbench/query10.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query10.out rename to regression-test/data/shape_check/clickbench/query10.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query11.out b/regression-test/data/shape_check/clickbench/query11.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query11.out rename to regression-test/data/shape_check/clickbench/query11.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query12.out b/regression-test/data/shape_check/clickbench/query12.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query12.out rename to regression-test/data/shape_check/clickbench/query12.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query13.out b/regression-test/data/shape_check/clickbench/query13.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query13.out rename to regression-test/data/shape_check/clickbench/query13.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query14.out b/regression-test/data/shape_check/clickbench/query14.out similarity index 100% rename from 
regression-test/data/nereids_clickbench_shape_p0/query14.out rename to regression-test/data/shape_check/clickbench/query14.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query15.out b/regression-test/data/shape_check/clickbench/query15.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query15.out rename to regression-test/data/shape_check/clickbench/query15.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query16.out b/regression-test/data/shape_check/clickbench/query16.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query16.out rename to regression-test/data/shape_check/clickbench/query16.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query17.out b/regression-test/data/shape_check/clickbench/query17.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query17.out rename to regression-test/data/shape_check/clickbench/query17.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query18.out b/regression-test/data/shape_check/clickbench/query18.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query18.out rename to regression-test/data/shape_check/clickbench/query18.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query19.out b/regression-test/data/shape_check/clickbench/query19.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query19.out rename to regression-test/data/shape_check/clickbench/query19.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query2.out b/regression-test/data/shape_check/clickbench/query2.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query2.out rename to regression-test/data/shape_check/clickbench/query2.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query20.out 
b/regression-test/data/shape_check/clickbench/query20.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query20.out rename to regression-test/data/shape_check/clickbench/query20.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query21.out b/regression-test/data/shape_check/clickbench/query21.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query21.out rename to regression-test/data/shape_check/clickbench/query21.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query22.out b/regression-test/data/shape_check/clickbench/query22.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query22.out rename to regression-test/data/shape_check/clickbench/query22.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query23.out b/regression-test/data/shape_check/clickbench/query23.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query23.out rename to regression-test/data/shape_check/clickbench/query23.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query24.out b/regression-test/data/shape_check/clickbench/query24.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query24.out rename to regression-test/data/shape_check/clickbench/query24.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query25.out b/regression-test/data/shape_check/clickbench/query25.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query25.out rename to regression-test/data/shape_check/clickbench/query25.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query26.out b/regression-test/data/shape_check/clickbench/query26.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query26.out rename to regression-test/data/shape_check/clickbench/query26.out diff --git 
a/regression-test/data/nereids_clickbench_shape_p0/query27.out b/regression-test/data/shape_check/clickbench/query27.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query27.out rename to regression-test/data/shape_check/clickbench/query27.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query28.out b/regression-test/data/shape_check/clickbench/query28.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query28.out rename to regression-test/data/shape_check/clickbench/query28.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query29.out b/regression-test/data/shape_check/clickbench/query29.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query29.out rename to regression-test/data/shape_check/clickbench/query29.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query3.out b/regression-test/data/shape_check/clickbench/query3.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query3.out rename to regression-test/data/shape_check/clickbench/query3.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query30.out b/regression-test/data/shape_check/clickbench/query30.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query30.out rename to regression-test/data/shape_check/clickbench/query30.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query31.out b/regression-test/data/shape_check/clickbench/query31.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query31.out rename to regression-test/data/shape_check/clickbench/query31.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query32.out b/regression-test/data/shape_check/clickbench/query32.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query32.out rename to 
regression-test/data/shape_check/clickbench/query32.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query33.out b/regression-test/data/shape_check/clickbench/query33.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query33.out rename to regression-test/data/shape_check/clickbench/query33.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query34.out b/regression-test/data/shape_check/clickbench/query34.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query34.out rename to regression-test/data/shape_check/clickbench/query34.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query35.out b/regression-test/data/shape_check/clickbench/query35.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query35.out rename to regression-test/data/shape_check/clickbench/query35.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query36.out b/regression-test/data/shape_check/clickbench/query36.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query36.out rename to regression-test/data/shape_check/clickbench/query36.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query37.out b/regression-test/data/shape_check/clickbench/query37.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query37.out rename to regression-test/data/shape_check/clickbench/query37.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query38.out b/regression-test/data/shape_check/clickbench/query38.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query38.out rename to regression-test/data/shape_check/clickbench/query38.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query39.out b/regression-test/data/shape_check/clickbench/query39.out similarity index 100% rename from 
regression-test/data/nereids_clickbench_shape_p0/query39.out rename to regression-test/data/shape_check/clickbench/query39.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query4.out b/regression-test/data/shape_check/clickbench/query4.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query4.out rename to regression-test/data/shape_check/clickbench/query4.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query40.out b/regression-test/data/shape_check/clickbench/query40.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query40.out rename to regression-test/data/shape_check/clickbench/query40.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query41.out b/regression-test/data/shape_check/clickbench/query41.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query41.out rename to regression-test/data/shape_check/clickbench/query41.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query42.out b/regression-test/data/shape_check/clickbench/query42.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query42.out rename to regression-test/data/shape_check/clickbench/query42.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query43.out b/regression-test/data/shape_check/clickbench/query43.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query43.out rename to regression-test/data/shape_check/clickbench/query43.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query5.out b/regression-test/data/shape_check/clickbench/query5.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query5.out rename to regression-test/data/shape_check/clickbench/query5.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query6.out 
b/regression-test/data/shape_check/clickbench/query6.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query6.out rename to regression-test/data/shape_check/clickbench/query6.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query7.out b/regression-test/data/shape_check/clickbench/query7.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query7.out rename to regression-test/data/shape_check/clickbench/query7.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query8.out b/regression-test/data/shape_check/clickbench/query8.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query8.out rename to regression-test/data/shape_check/clickbench/query8.out diff --git a/regression-test/data/nereids_clickbench_shape_p0/query9.out b/regression-test/data/shape_check/clickbench/query9.out similarity index 100% rename from regression-test/data/nereids_clickbench_shape_p0/query9.out rename to regression-test/data/shape_check/clickbench/query9.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/flat.out b/regression-test/data/shape_check/ssb_sf100/shape/flat.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/flat.out rename to regression-test/data/shape_check/ssb_sf100/shape/flat.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.2.out diff --git 
a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q1.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q1.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q1.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q2.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q2.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q2.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.3.out similarity index 100% rename from 
regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.3.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.4.out b/regression-test/data/shape_check/ssb_sf100/shape/q3.4.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q3.4.out rename to regression-test/data/shape_check/ssb_sf100/shape/q3.4.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.1.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.1.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.1.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.1.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.2.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.2.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.2.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.2.out diff --git a/regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.3.out b/regression-test/data/shape_check/ssb_sf100/shape/q4.3.out similarity index 100% rename from regression-test/data/nereids_ssb_shape_sf100_p0/shape/q4.3.out rename to regression-test/data/shape_check/ssb_sf100/shape/q4.3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/constraints/query23.out b/regression-test/data/shape_check/tpcds_sf100/constraints/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/constraints/query23.out rename to regression-test/data/shape_check/tpcds_sf100/constraints/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query43.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query89.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query9.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query9.out rename to 
regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out 
b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.out b/regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.out rename to regression-test/data/shape_check/tpcds_sf100/noStatsRfPrune/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query9.out rename to 
regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out 
b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.out b/regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.out rename to regression-test/data/shape_check/tpcds_sf100/no_stats_shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query1.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query1.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query10.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query10.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query11.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query11.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query12.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query12.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query13.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query13.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query14.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query14.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query15.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query15.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query16.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query16.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query17.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query17.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query18.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query18.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query19.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query19.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query2.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query2.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query20.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query20.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query21.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query21.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query22.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query22.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query23.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query23.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query24.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query24.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query25.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query25.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query26.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query26.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query27.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query27.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query28.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query28.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query29.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query29.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query30.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query30.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query31.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query31.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query32.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query32.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query33.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query33.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query34.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query34.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query35.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query35.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query36.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query36.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query37.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query37.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query38.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query38.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query39.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query39.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query4.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query4.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query40.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query40.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query41.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query41.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query42.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query42.out rename to 
regression-test/data/shape_check/tpcds_sf100/rf_prune/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query44.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query44.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query45.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query45.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query46.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query46.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query47.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query47.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query48.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query48.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query49.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query49.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query5.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query5.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query50.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query50.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query51.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query51.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query52.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query52.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query53.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query53.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query54.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query54.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query55.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query55.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query56.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query56.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query57.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query57.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query58.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query58.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query59.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query59.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query6.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query6.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query60.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query60.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query61.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query61.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query62.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query62.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query63.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query63.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query64.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query65.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query65.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query66.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query66.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query67.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query67.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query68.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query68.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query69.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query69.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query7.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query7.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query70.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query70.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query71.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query71.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query72.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query72.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query73.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query73.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query74.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query74.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query75.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query75.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query76.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query76.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query77.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query77.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query78.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query78.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query79.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query79.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query8.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query8.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query80.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query80.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query81.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query81.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query82.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query82.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query83.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query83.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query84.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query84.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query85.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query85.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query86.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query86.out 
rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query87.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query87.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query88.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query88.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query89.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query89.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query90.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query90.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query91.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query91.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query91.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query92.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query92.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query93.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query93.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query94.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query94.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query95.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query95.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query96.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query96.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query97.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query97.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query97.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query98.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query98.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query99.out b/regression-test/data/shape_check/tpcds_sf100/rf_prune/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query99.out rename to regression-test/data/shape_check/tpcds_sf100/rf_prune/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf100/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf100/shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf100/shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf100/shape/query12.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf100/shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query14.out b/regression-test/data/shape_check/tpcds_sf100/shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf100/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf100/shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf100/shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf100/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query18.out 
rename to regression-test/data/shape_check/tpcds_sf100/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf100/shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf100/shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf100/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf100/shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf100/shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf100/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query23.out 
diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf100/shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf100/shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query26.out b/regression-test/data/shape_check/tpcds_sf100/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf100/shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf100/shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf100/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query29.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query3.out b/regression-test/data/shape_check/tpcds_sf100/shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query3.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf100/shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf100/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf100/shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query33.out b/regression-test/data/shape_check/tpcds_sf100/shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf100/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query35.out 
b/regression-test/data/shape_check/tpcds_sf100/shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf100/shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf100/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query38.out b/regression-test/data/shape_check/tpcds_sf100/shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf100/shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf100/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf100/shape/query40.out similarity 
index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf100/shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf100/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query43.out b/regression-test/data/shape_check/tpcds_sf100/shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query43.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf100/shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf100/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf100/shape/query46.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf100/shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf100/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf100/shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf100/shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf100/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf100/shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query51.out 
rename to regression-test/data/shape_check/tpcds_sf100/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf100/shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf100/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf100/shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf100/shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf100/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf100/shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query57.out 
diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf100/shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf100/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query59.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf100/shape/query6.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf100/shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query61.out b/regression-test/data/shape_check/tpcds_sf100/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf100/shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query63.out 
b/regression-test/data/shape_check/tpcds_sf100/shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf100/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf100/shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf100/shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query67.out b/regression-test/data/shape_check/tpcds_sf100/shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf100/shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf100/shape/query69.out 
similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf100/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf100/shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf100/shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf100/shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf100/shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf100/shape/query74.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf100/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf100/shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf100/shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf100/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query79.out b/regression-test/data/shape_check/tpcds_sf100/shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.out b/regression-test/data/shape_check/tpcds_sf100/shape/query8.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf100/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf100/shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf100/shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf100/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf100/shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf100/shape/query85.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf100/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf100/shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf100/shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf100/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.out b/regression-test/data/shape_check/tpcds_sf100/shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf100/shape/query90.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf100/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf100/shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf100/shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf100/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf100/shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query96.out b/regression-test/data/shape_check/tpcds_sf100/shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query96.out 
rename to regression-test/data/shape_check/tpcds_sf100/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf100/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query98.out b/regression-test/data/shape_check/tpcds_sf100/shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf100/shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf100/shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.out diff --git 
a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.out b/regression-test/data/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.out rename to regression-test/data/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query1.out similarity index 100% rename from 
regression-test/data/nereids_hint_tpcds_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query1.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query10.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query10.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query11.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query11.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query12.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query12.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query13.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query13.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query14.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query14.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query14.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query15.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query15.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query16.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query16.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query17.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query17.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query18.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query18.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query19.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query19.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query2.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query2.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query20.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query20.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query21.out similarity index 100% rename from 
regression-test/data/nereids_hint_tpcds_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query21.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query22.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query22.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query23.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query23.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query24.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query24.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query25.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query25.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query26.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query26.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query26.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query27.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query27.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query28.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query28.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query29.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query29.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query3.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query3.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query30.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query30.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query31.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query31.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query32.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query32.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query33.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query33.out similarity index 100% rename from 
regression-test/data/nereids_hint_tpcds_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query33.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query34.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query34.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query35.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query35.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query35.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query36.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query36.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query37.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query37.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query38.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query38.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query38.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query39.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query39.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query4.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query4.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query40.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query40.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query41.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query41.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query42.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query42.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query43.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query43.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query43.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query44.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query45.out similarity 
index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query45.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query46.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query46.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query47.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query47.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query48.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query48.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query49.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query49.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query5.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query5.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query50.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query50.out rename to 
regression-test/data/shape_check/tpcds_sf1000/hint/query50.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query51.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query51.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query52.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query52.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query53.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query53.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query54.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query54.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query55.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query56.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query57.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query57.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query58.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query58.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query59.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query6.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query60.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query60.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query61.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query61.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query62.out similarity index 
100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query62.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query63.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query63.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query63.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query64.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query64.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query65.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query65.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query66.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query66.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query67.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query67.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query67.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query68.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query68.out diff 
--git a/regression-test/data/nereids_hint_tpcds_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query69.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query69.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query7.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query7.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query70.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query70.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query71.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query71.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query72.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query72.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query73.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query73.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query74.out similarity index 100% 
rename from regression-test/data/nereids_hint_tpcds_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query74.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query75.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query75.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query76.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query76.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query77.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query77.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query78.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query78.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query79.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query79.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query79.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query8.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query8.out diff --git 
a/regression-test/data/nereids_hint_tpcds_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query80.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query80.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query81.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query81.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query82.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query82.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query83.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query83.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query84.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query84.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query85.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query85.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query86.out similarity index 100% rename 
from regression-test/data/nereids_hint_tpcds_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query86.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query87.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query87.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query88.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query88.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query89.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query9.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/rf_prune/query9.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query9.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query90.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query90.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query91.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query91.out 
diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query92.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query92.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query93.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query93.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query94.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query95.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query96.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query96.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query96.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query97.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query97.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query98.out 
b/regression-test/data/shape_check/tpcds_sf1000/hint/query98.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query98.out diff --git a/regression-test/data/nereids_hint_tpcds_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf1000/hint/query99.out similarity index 100% rename from regression-test/data/nereids_hint_tpcds_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf1000/hint/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query1.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query10.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query10.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query11.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query12.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query13.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query13.out similarity 
index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query14.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query15.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query16.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query16.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query18.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query19.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query19.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query2.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query20.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query21.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query21.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query22.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query23.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query24.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query26.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query27.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query27.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query28.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query3.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query3.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query30.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query31.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query32.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query32.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query33.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query34.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query35.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query35.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query36.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query37.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query38.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query38.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query39.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query4.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query40.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query40.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query41.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query42.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query43.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query43.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query44.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query45.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query46.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query46.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query47.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query48.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query49.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query49.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query5.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query50.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query51.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query51.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query52.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query53.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query54.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query54.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query55.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query56.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query57.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query57.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query58.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query59.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query6.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query6.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query60.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query61.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query62.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query62.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query63.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query65.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query66.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query67.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query68.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query68.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query69.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query7.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query70.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query70.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query71.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query72.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query73.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query73.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query74.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query75.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query76.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query76.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query77.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query78.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query79.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query79.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query8.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query80.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query81.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query81.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query82.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query83.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query84.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query84.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query85.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query86.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query87.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query87.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query88.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query89.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query9.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query9.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf100_p0/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query90.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query91.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query92.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query92.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query93.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query94.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query95.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query95.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query96.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query97.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query98.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query98.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query99.out b/regression-test/data/shape_check/tpcds_sf1000/shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf1000_p0/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf1000/shape/query99.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query1.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query1.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query1.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query1.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query10.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query10.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query10.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query10.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query11.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query11.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query11.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query11.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query12.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query12.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query12.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query12.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query13.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query13.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query13.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query13.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query14.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query14.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query14.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query14.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query15.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query15.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query15.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query15.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query16.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query16.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query16.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query16.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query17.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query17.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query17.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query17.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query18.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query18.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query18.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query18.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query19.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query19.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query19.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query19.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query2.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query2.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query2.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query2.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query20.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query20.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query20.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query20.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query21.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query21.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query21.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query21.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query22.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query22.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query22.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query22.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query23.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query23.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query23.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query23.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query24.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query24.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query24.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query24.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query25.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query25.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query25.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query25.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query26.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query26.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query26.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query26.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query27.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query27.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query27.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query27.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query28.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query28.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query28.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query28.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query29.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query29.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query29.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query29.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query3.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query3.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query3.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query3.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query30.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query30.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query30.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query30.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query31.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query31.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query31.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query31.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query32.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query32.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query32.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query32.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query33.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query33.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query33.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query33.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query34.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query34.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query34.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query34.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query35.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query35.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query35.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query35.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query36.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query36.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query36.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query36.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query37.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query37.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query37.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query37.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query38.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query38.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query38.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query38.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query39.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query39.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query39.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query39.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query4.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query4.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query4.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query4.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query40.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query40.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query40.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query40.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query41.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query41.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query41.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query41.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query42.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query42.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query42.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query42.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query43.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query43.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query43.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query43.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query44.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query44.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query44.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query44.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query45.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query45.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query45.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query45.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query46.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query46.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query46.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query46.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query47.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query47.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query47.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query47.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query48.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query48.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query48.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query48.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query49.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query49.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query49.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query49.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query5.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query5.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query5.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query5.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query50.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query50.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query50.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query50.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query51.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query51.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query51.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query51.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query52.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query52.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query52.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query52.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query53.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query53.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query53.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query53.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query54.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query54.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query54.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query54.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query55.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query55.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query55.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query55.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query56.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query56.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query56.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query56.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query57.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query57.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query57.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query57.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query58.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query58.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query58.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query58.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query59.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query59.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query59.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query59.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query6.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query6.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query6.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query6.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query60.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query60.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query60.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query60.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query61.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query61.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query61.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query61.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query62.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query62.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query62.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query62.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query63.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query63.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query63.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query63.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query64.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query64.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query64.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query64.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query65.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query65.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query65.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query65.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query66.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query66.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query66.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query66.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query67.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query67.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query67.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query67.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query68.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query68.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query68.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query68.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query69.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query69.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query69.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query69.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query7.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query7.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query7.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query7.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query70.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query70.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query70.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query70.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query71.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query71.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query71.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query71.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query72.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query72.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query72.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query72.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query73.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query73.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query73.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query73.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query74.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query74.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query74.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query74.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query75.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query75.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query75.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query75.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query76.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query76.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query76.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query76.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query77.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query77.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query77.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query77.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query78.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query78.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query78.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query78.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query79.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query79.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query79.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query79.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query8.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query8.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query8.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query8.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query80.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query80.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query80.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query80.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query81.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query81.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query81.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query81.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query82.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query82.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query82.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query82.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query83.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query83.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query83.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query83.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query84.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query84.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query84.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query84.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query85.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query85.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query85.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query85.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query86.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query86.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query86.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query86.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query87.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query87.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query87.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query87.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query88.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query88.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query88.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query88.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query89.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query89.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query89.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query89.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query9.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query9.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query9.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query9.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query90.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query90.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query90.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query90.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query91.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query91.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query91.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query91.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query92.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query92.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query92.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query92.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query93.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query93.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query93.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query93.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query94.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query94.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query94.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query94.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query95.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query95.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query95.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query95.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query96.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query96.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query96.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query96.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query97.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query97.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query97.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query97.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query98.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query98.out similarity index 100% rename from 
regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query98.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query98.out diff --git a/regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query99.out b/regression-test/data/shape_check/tpcds_sf10t_orc/shape/query99.out similarity index 100% rename from regression-test/data/nereids_tpcds_shape_sf10t_orc/shape/query99.out rename to regression-test/data/shape_check/tpcds_sf10t_orc/shape/query99.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q1.out b/regression-test/data/shape_check/tpch_sf1000/hint/q1.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q1.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q1.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q10.out b/regression-test/data/shape_check/tpch_sf1000/hint/q10.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q10.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q10.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q11.out b/regression-test/data/shape_check/tpch_sf1000/hint/q11.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q11.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q11.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q12.out b/regression-test/data/shape_check/tpch_sf1000/hint/q12.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q12.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q12.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q13.out b/regression-test/data/shape_check/tpch_sf1000/hint/q13.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q13.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q13.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q14.out 
b/regression-test/data/shape_check/tpch_sf1000/hint/q14.out similarity index 82% rename from regression-test/data/nereids_hint_tpch_p0/shape/q14.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q14.out index 3633709f96fa8a..d92539ff58dce4 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q14.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q14.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalDistribute[DistributionSpecGather] --------hashAgg[LOCAL] ----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() +------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_partkey = part.p_partkey)) otherCondition=() --------------PhysicalProject ----------------PhysicalOlapScan[part] --------------PhysicalProject diff --git a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/hint/q15.out similarity index 95% rename from regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q15.out index 9e6b383230a34f..4e525326a19945 100644 --- a/regression-test/data/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q15.out @@ -9,14 +9,14 @@ PhysicalResultSink ------------PhysicalProject --------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = revenue0.supplier_no)) otherCondition=() ----------------PhysicalProject +------------------PhysicalOlapScan[supplier] +----------------PhysicalProject ------------------hashAgg[GLOBAL] --------------------PhysicalDistribute[DistributionSpecHash] ----------------------hashAgg[LOCAL] ------------------------PhysicalProject --------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) 
----------------------------PhysicalOlapScan[lineitem] -----------------PhysicalProject -------------------PhysicalOlapScan[supplier] ------------hashAgg[GLOBAL] --------------PhysicalDistribute[DistributionSpecGather] ----------------hashAgg[LOCAL] @@ -28,3 +28,8 @@ PhysicalResultSink ----------------------------filter((lineitem.l_shipdate < '1996-04-01') and (lineitem.l_shipdate >= '1996-01-01')) ------------------------------PhysicalOlapScan[lineitem] +Hint log: +Used: leading(supplier revenue0 ) +UnUsed: +SyntaxError: + diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q17.out b/regression-test/data/shape_check/tpch_sf1000/hint/q17.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q17.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q17.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q19.out b/regression-test/data/shape_check/tpch_sf1000/hint/q19.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q19.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q19.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q3.out b/regression-test/data/shape_check/tpch_sf1000/hint/q3.out similarity index 88% rename from regression-test/data/nereids_hint_tpch_p0/shape/q3.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q3.out index a58bf8720464f4..0e310fdb46e2c0 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q3.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q3.out @@ -6,7 +6,7 @@ PhysicalResultSink ------PhysicalTopN[LOCAL_SORT] --------hashAgg[LOCAL] ----------PhysicalProject -------------hashJoin[INNER_JOIN broadcast] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() +------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() --------------PhysicalProject ----------------filter((lineitem.l_shipdate > 
'1995-03-15')) ------------------PhysicalOlapScan[lineitem] diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q4.out b/regression-test/data/shape_check/tpch_sf1000/hint/q4.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q4.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q4.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q5.out b/regression-test/data/shape_check/tpch_sf1000/hint/q5.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q5.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q5.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q6.out b/regression-test/data/shape_check/tpch_sf1000/hint/q6.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q6.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q6.out diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q7.out b/regression-test/data/shape_check/tpch_sf1000/hint/q7.out similarity index 89% rename from regression-test/data/nereids_hint_tpch_p0/shape/q7.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q7.out index d9fb0e0791a203..62b5874d806aed 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q7.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q7.out @@ -8,7 +8,7 @@ PhysicalResultSink ----------PhysicalDistribute[DistributionSpecHash] ------------hashAgg[LOCAL] --------------PhysicalProject -----------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) +----------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((orders.o_orderkey = lineitem.l_orderkey)) otherCondition=(OR[AND[(n1.n_name = 'FRANCE'),(n2.n_name = 'GERMANY')],AND[(n1.n_name = 'GERMANY'),(n2.n_name = 'FRANCE')]]) 
------------------PhysicalProject --------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() ----------------------PhysicalProject diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q8.out b/regression-test/data/shape_check/tpch_sf1000/hint/q8.out similarity index 90% rename from regression-test/data/nereids_hint_tpch_p0/shape/q8.out rename to regression-test/data/shape_check/tpch_sf1000/hint/q8.out index 486e40152fb644..7be8c0af45a719 100644 --- a/regression-test/data/nereids_hint_tpch_p0/shape/q8.out +++ b/regression-test/data/shape_check/tpch_sf1000/hint/q8.out @@ -11,11 +11,11 @@ PhysicalResultSink ----------------PhysicalProject ------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_nationkey = n2.n_nationkey)) otherCondition=() --------------------PhysicalProject -----------------------hashJoin[INNER_JOIN broadcast] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() +----------------------hashJoin[INNER_JOIN bucketShuffle] hashCondition=((supplier.s_suppkey = lineitem.l_suppkey)) otherCondition=() ------------------------PhysicalProject --------------------------PhysicalOlapScan[supplier] ------------------------PhysicalProject ---------------------------hashJoin[INNER_JOIN broadcast] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() +--------------------------hashJoin[INNER_JOIN shuffle] hashCondition=((orders.o_custkey = customer.c_custkey)) otherCondition=() ----------------------------PhysicalProject ------------------------------hashJoin[INNER_JOIN colocated] hashCondition=((lineitem.l_orderkey = orders.o_orderkey)) otherCondition=() --------------------------------PhysicalProject diff --git a/regression-test/data/nereids_hint_tpch_p0/shape/q9.out b/regression-test/data/shape_check/tpch_sf1000/hint/q9.out similarity index 100% rename from regression-test/data/nereids_hint_tpch_p0/shape/q9.out rename to 
regression-test/data/shape_check/tpch_sf1000/hint/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q14.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.out 
b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.out rename to 
regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.out b/regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q9.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.out rename to regression-test/data/shape_check/tpch_sf1000/nostats_rf_prune/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q1.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q1.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q10.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q10.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q11.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q11.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q12.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q12.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q13.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q13.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q14.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q14.out rename to 
regression-test/data/shape_check/tpch_sf1000/rf_prune/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q15.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q15.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q16.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q16.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q17.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q17.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q18.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q18.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q19.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q19.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q2.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q2.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q2.out diff --git 
a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q20.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q21.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q21.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q22.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q22.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q3.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q3.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q4.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q4.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q5.out 
b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q5.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q6.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q6.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q7.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q7.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q8.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q8.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q9.out b/regression-test/data/shape_check/tpch_sf1000/rf_prune/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/rf_prune/q9.out rename to regression-test/data/shape_check/tpch_sf1000/rf_prune/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.out b/regression-test/data/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.out rename to regression-test/data/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q1.out 
b/regression-test/data/shape_check/tpch_sf1000/shape/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q1.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q10.out b/regression-test/data/shape_check/tpch_sf1000/shape/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q10.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q11.out b/regression-test/data/shape_check/tpch_sf1000/shape/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q11.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q11.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q12.out b/regression-test/data/shape_check/tpch_sf1000/shape/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q12.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q13.out b/regression-test/data/shape_check/tpch_sf1000/shape/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q13.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q14.out b/regression-test/data/shape_check/tpch_sf1000/shape/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q14.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q15.out b/regression-test/data/shape_check/tpch_sf1000/shape/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q15.out rename to 
regression-test/data/shape_check/tpch_sf1000/shape/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q16.out b/regression-test/data/shape_check/tpch_sf1000/shape/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q16.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q17.out b/regression-test/data/shape_check/tpch_sf1000/shape/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q17.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q18.out b/regression-test/data/shape_check/tpch_sf1000/shape/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q18.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q19.out b/regression-test/data/shape_check/tpch_sf1000/shape/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q19.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q2.out b/regression-test/data/shape_check/tpch_sf1000/shape/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q2.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/shape/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out 
b/regression-test/data/shape_check/tpch_sf1000/shape/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q20.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q21.out b/regression-test/data/shape_check/tpch_sf1000/shape/q21.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q21.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q22.out b/regression-test/data/shape_check/tpch_sf1000/shape/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q22.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q3.out b/regression-test/data/shape_check/tpch_sf1000/shape/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q3.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q4.out b/regression-test/data/shape_check/tpch_sf1000/shape/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q4.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out b/regression-test/data/shape_check/tpch_sf1000/shape/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q5.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q6.out b/regression-test/data/shape_check/tpch_sf1000/shape/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q6.out rename to 
regression-test/data/shape_check/tpch_sf1000/shape/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q7.out b/regression-test/data/shape_check/tpch_sf1000/shape/q7.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q7.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q8.out b/regression-test/data/shape_check/tpch_sf1000/shape/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q8.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out b/regression-test/data/shape_check/tpch_sf1000/shape/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape/q9.out rename to regression-test/data/shape_check/tpch_sf1000/shape/q9.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q1.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q1.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q10.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q10.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q11.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q11.out diff --git 
a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q12.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q12.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q13.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q13.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q14.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q14.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q15.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q15.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q16.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q16.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q17.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.out rename to 
regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q17.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q18.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q18.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q19.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q19.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q2.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q2.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q20.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q21.out similarity index 100% rename from 
regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q21.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q22.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q22.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q3.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q3.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q4.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q4.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q5.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q5.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q6.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q6.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q7.out 
similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q7.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q8.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q8.out diff --git a/regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out b/regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q9.out similarity index 100% rename from regression-test/data/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.out rename to regression-test/data/shape_check/tpch_sf1000/shape_no_stats/q9.out diff --git a/regression-test/suites/new_shapes_p0/clickbench/load.groovy b/regression-test/suites/new_shapes_p0/clickbench/load.groovy deleted file mode 100644 index 8ebc0035d2a22b..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/load.groovy +++ /dev/null @@ -1,149 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -// Most of the cases are copied from https://github.com/trinodb/trino/tree/master -// /testing/trino-product-tests/src/main/resources/sql-tests/testcases -// and modified by Doris. - -// syntax error: -// q06 q13 q15 -// Test 23 suites, failed 3 suites - -// Note: To filter out tables from sql files, use the following one-liner comamnd -// sed -nr 's/.*tables: (.*)$/\1/gp' /path/to/*.sql | sed -nr 's/,/\n/gp' | sort | uniq -suite("load") { - if (isCloudMode()) { - return - } - - sql """ - DROP TABLE IF EXISTS hits - """ - - sql """ - CREATE TABLE IF NOT EXISTS hits ( - CounterID INT NOT NULL, - EventDate Datev2 NOT NULL, - UserID BIGINT NOT NULL, - EventTime DateTimev2 NOT NULL, - WatchID BIGINT NOT NULL, - JavaEnable SMALLINT NOT NULL, - Title STRING NOT NULL, - GoodEvent SMALLINT NOT NULL, - ClientIP INT NOT NULL, - RegionID INT NOT NULL, - CounterClass SMALLINT NOT NULL, - OS SMALLINT NOT NULL, - UserAgent SMALLINT NOT NULL, - URL STRING NOT NULL, - Referer STRING NOT NULL, - IsRefresh SMALLINT NOT NULL, - RefererCategoryID SMALLINT NOT NULL, - RefererRegionID INT NOT NULL, - URLCategoryID SMALLINT NOT NULL, - URLRegionID INT NOT NULL, - ResolutionWidth SMALLINT NOT NULL, - ResolutionHeight SMALLINT NOT NULL, - ResolutionDepth SMALLINT NOT NULL, - FlashMajor SMALLINT NOT NULL, - FlashMinor SMALLINT NOT NULL, - FlashMinor2 STRING NOT NULL, - NetMajor SMALLINT NOT NULL, - NetMinor SMALLINT NOT NULL, - UserAgentMajor SMALLINT NOT NULL, - UserAgentMinor VARCHAR(255) NOT NULL, - CookieEnable SMALLINT NOT NULL, - JavascriptEnable SMALLINT NOT NULL, - IsMobile SMALLINT NOT NULL, - MobilePhone SMALLINT NOT NULL, - MobilePhoneModel STRING NOT NULL, - Params STRING NOT NULL, - IPNetworkID INT NOT NULL, - TraficSourceID SMALLINT NOT NULL, - SearchEngineID SMALLINT NOT NULL, - SearchPhrase STRING NOT NULL, - AdvEngineID SMALLINT NOT NULL, - IsArtifical SMALLINT NOT NULL, - WindowClientWidth SMALLINT NOT NULL, - WindowClientHeight SMALLINT NOT NULL, - ClientTimeZone 
SMALLINT NOT NULL, - ClientEventTime DateTimev2 NOT NULL, - SilverlightVersion1 SMALLINT NOT NULL, - SilverlightVersion2 SMALLINT NOT NULL, - SilverlightVersion3 INT NOT NULL, - SilverlightVersion4 SMALLINT NOT NULL, - PageCharset STRING NOT NULL, - CodeVersion INT NOT NULL, - IsLink SMALLINT NOT NULL, - IsDownload SMALLINT NOT NULL, - IsNotBounce SMALLINT NOT NULL, - FUniqID BIGINT NOT NULL, - OriginalURL STRING NOT NULL, - HID INT NOT NULL, - IsOldCounter SMALLINT NOT NULL, - IsEvent SMALLINT NOT NULL, - IsParameter SMALLINT NOT NULL, - DontCountHits SMALLINT NOT NULL, - WithHash SMALLINT NOT NULL, - HitColor CHAR NOT NULL, - LocalEventTime DateTimev2 NOT NULL, - Age SMALLINT NOT NULL, - Sex SMALLINT NOT NULL, - Income SMALLINT NOT NULL, - Interests SMALLINT NOT NULL, - Robotness SMALLINT NOT NULL, - RemoteIP INT NOT NULL, - WindowName INT NOT NULL, - OpenerName INT NOT NULL, - HistoryLength SMALLINT NOT NULL, - BrowserLanguage STRING NOT NULL, - BrowserCountry STRING NOT NULL, - SocialNetwork STRING NOT NULL, - SocialAction STRING NOT NULL, - HTTPError SMALLINT NOT NULL, - SendTiming INT NOT NULL, - DNSTiming INT NOT NULL, - ConnectTiming INT NOT NULL, - ResponseStartTiming INT NOT NULL, - ResponseEndTiming INT NOT NULL, - FetchTiming INT NOT NULL, - SocialSourceNetworkID SMALLINT NOT NULL, - SocialSourcePage STRING NOT NULL, - ParamPrice BIGINT NOT NULL, - ParamOrderID STRING NOT NULL, - ParamCurrency STRING NOT NULL, - ParamCurrencyID SMALLINT NOT NULL, - OpenstatServiceName STRING NOT NULL, - OpenstatCampaignID STRING NOT NULL, - OpenstatAdID STRING NOT NULL, - OpenstatSourceID STRING NOT NULL, - UTMSource STRING NOT NULL, - UTMMedium STRING NOT NULL, - UTMCampaign STRING NOT NULL, - UTMContent STRING NOT NULL, - UTMTerm STRING NOT NULL, - FromTag STRING NOT NULL, - HasGCLID SMALLINT NOT NULL, - RefererHash BIGINT NOT NULL, - URLHash BIGINT NOT NULL, - CLID INT NOT NULL - ) - DUPLICATE KEY (CounterID, EventDate, UserID, EventTime, WatchID) - DISTRIBUTED BY 
HASH(UserID) BUCKETS 48 - PROPERTIES ( "replication_num"="1"); - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query1.groovy b/regression-test/suites/new_shapes_p0/clickbench/query1.groovy deleted file mode 100644 index e1294c1e2563fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query1.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(*) FROM hits""" - qt_ckbench_shape_1 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query10.groovy b/regression-test/suites/new_shapes_p0/clickbench/query10.groovy deleted file mode 100644 index 027b06ed7919f5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query10.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query10") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT RegionID, SUM(AdvEngineID), COUNT(*) AS c, AVG(ResolutionWidth), COUNT(DISTINCT UserID) FROM hits GROUP BY RegionID ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_10 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query11.groovy b/regression-test/suites/new_shapes_p0/clickbench/query11.groovy deleted file mode 100644 index c87be65cb43346..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query11.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query11") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhoneModel ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_11 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query12.groovy b/regression-test/suites/new_shapes_p0/clickbench/query12.groovy deleted file mode 100644 index e58d4d3ff0a4dd..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query12.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query12") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MobilePhone, MobilePhoneModel, COUNT(DISTINCT UserID) AS u FROM hits WHERE MobilePhoneModel <> '' GROUP BY MobilePhone, MobilePhoneModel ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_12 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query13.groovy b/regression-test/suites/new_shapes_p0/clickbench/query13.groovy deleted file mode 100644 index e156e51bc4cc6d..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query13.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_13 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query14.groovy b/regression-test/suites/new_shapes_p0/clickbench/query14.groovy deleted file mode 100644 index 2bb1aa260fe958..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query14.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, COUNT(DISTINCT UserID) AS u FROM hits WHERE SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY u DESC LIMIT 10""" - qt_ckbench_shape_14 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query15.groovy b/regression-test/suites/new_shapes_p0/clickbench/query15.groovy deleted file mode 100644 index f8becb72a625d4..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query15.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchEngineID, SearchPhrase, COUNT(*) AS c FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_15 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query16.groovy b/regression-test/suites/new_shapes_p0/clickbench/query16.groovy deleted file mode 100644 index 4d742899e6b704..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query16.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query16") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, COUNT(*) FROM hits GROUP BY UserID ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_16 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query17.groovy b/regression-test/suites/new_shapes_p0/clickbench/query17.groovy deleted file mode 100644 index 76307e1167b8ec..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query17.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_17 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query18.groovy b/regression-test/suites/new_shapes_p0/clickbench/query18.groovy deleted file mode 100644 index 8a9d8f19b619da..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query18.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, SearchPhrase LIMIT 10""" - qt_ckbench_shape_18 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query19.groovy b/regression-test/suites/new_shapes_p0/clickbench/query19.groovy deleted file mode 100644 index da080ef696aab9..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query19.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID, extract(minute FROM EventTime) AS m, SearchPhrase, COUNT(*) FROM hits GROUP BY UserID, m, SearchPhrase ORDER BY COUNT(*) DESC LIMIT 10""" - qt_ckbench_shape_19 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query2.groovy b/regression-test/suites/new_shapes_p0/clickbench/query2.groovy deleted file mode 100644 index 0deae902dbd810..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query2.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query2") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """ - SELECT COUNT(*) FROM hits WHERE AdvEngineID <> 0 - """ - qt_ckbench_shape_2 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query20.groovy b/regression-test/suites/new_shapes_p0/clickbench/query20.groovy deleted file mode 100644 index 2d7fc829508e6e..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query20.groovy +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query20") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT UserID FROM hits WHERE UserID = 435090932899640449""" - qt_ckbench_shape_20 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query21.groovy b/regression-test/suites/new_shapes_p0/clickbench/query21.groovy deleted file mode 100644 index 870316ee6ea8c6..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query21.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(*) FROM hits WHERE URL LIKE '%google%'""" - qt_ckbench_shape_21 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query22.groovy b/regression-test/suites/new_shapes_p0/clickbench/query22.groovy deleted file mode 100644 index c4e27d56c2b817..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query22.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, MIN(URL), COUNT(*) AS c FROM hits WHERE URL LIKE '%google%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_22 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query23.groovy b/regression-test/suites/new_shapes_p0/clickbench/query23.groovy deleted file mode 100644 index ff19e0ec4ea667..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query23.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase, MIN(URL), MIN(Title), COUNT(*) AS c, COUNT(DISTINCT UserID) FROM hits WHERE Title LIKE '%Google%' AND URL NOT LIKE '%.google.%' AND SearchPhrase <> '' GROUP BY SearchPhrase ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_23 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query24.groovy b/regression-test/suites/new_shapes_p0/clickbench/query24.groovy deleted file mode 100644 index 7ea248701a9b6b..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query24.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT * FROM hits WHERE URL LIKE '%google%' ORDER BY EventTime LIMIT 10""" - qt_ckbench_shape_24 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query25.groovy b/regression-test/suites/new_shapes_p0/clickbench/query25.groovy deleted file mode 100644 index d61c7475d137c5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query25.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query25") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime LIMIT 10""" - qt_ckbench_shape_25 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query26.groovy b/regression-test/suites/new_shapes_p0/clickbench/query26.groovy deleted file mode 100644 index b5cf08c9482011..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query26.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY SearchPhrase LIMIT 10""" - qt_ckbench_shape_26 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query27.groovy b/regression-test/suites/new_shapes_p0/clickbench/query27.groovy deleted file mode 100644 index e72528e97db520..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query27.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query27") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchPhrase FROM hits WHERE SearchPhrase <> '' ORDER BY EventTime, SearchPhrase LIMIT 10""" - qt_ckbench_shape_27 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query28.groovy b/regression-test/suites/new_shapes_p0/clickbench/query28.groovy deleted file mode 100644 index 08c2fa6b2505d5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query28.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT CounterID, AVG(length(URL)) AS l, COUNT(*) AS c FROM hits WHERE URL <> '' GROUP BY CounterID HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25""" - qt_ckbench_shape_28 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query29.groovy b/regression-test/suites/new_shapes_p0/clickbench/query29.groovy deleted file mode 100644 index e855ad7ff7bda3..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query29.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT REGEXP_REPLACE(Referer, '^https?://(?:www\\.)?([^/]+)/.*\$', '\\\\1') AS k, AVG(length(Referer)) AS l, COUNT(*) AS c, MIN(Referer) FROM hits WHERE Referer <> '' GROUP BY k HAVING COUNT(*) > 100000 ORDER BY l DESC LIMIT 25""" - qt_ckbench_shape_29 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query3.groovy b/regression-test/suites/new_shapes_p0/clickbench/query3.groovy deleted file mode 100644 index ac376f98ff2d2a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query3.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query3") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SUM(AdvEngineID), COUNT(*), AVG(ResolutionWidth) FROM hits""" - qt_ckbench_shape_3 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query30.groovy b/regression-test/suites/new_shapes_p0/clickbench/query30.groovy deleted file mode 100644 index 1a9b9f914d4147..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query30.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SUM(ResolutionWidth), SUM(ResolutionWidth + 1), SUM(ResolutionWidth + 2), SUM(ResolutionWidth + 3), SUM(ResolutionWidth + 4), SUM(ResolutionWidth + 5), SUM(ResolutionWidth + 6), SUM(ResolutionWidth + 7), SUM(ResolutionWidth + 8), SUM(ResolutionWidth + 9), SUM(ResolutionWidth + 10), SUM(ResolutionWidth + 11), SUM(ResolutionWidth + 12), SUM(ResolutionWidth + 13), SUM(ResolutionWidth + 14), SUM(ResolutionWidth + 15), SUM(ResolutionWidth + 16), SUM(ResolutionWidth + 17), SUM(ResolutionWidth + 18), SUM(ResolutionWidth + 19), SUM(ResolutionWidth + 20), SUM(ResolutionWidth + 21), SUM(ResolutionWidth + 22), SUM(ResolutionWidth + 23), SUM(ResolutionWidth + 24), SUM(ResolutionWidth + 25), SUM(ResolutionWidth + 26), SUM(ResolutionWidth + 27), SUM(ResolutionWidth + 28), SUM(ResolutionWidth + 29), SUM(ResolutionWidth + 30), SUM(ResolutionWidth + 31), SUM(ResolutionWidth + 32), SUM(ResolutionWidth + 33), SUM(ResolutionWidth + 34), SUM(ResolutionWidth + 35), SUM(ResolutionWidth + 36), SUM(ResolutionWidth + 37), SUM(ResolutionWidth + 38), SUM(ResolutionWidth + 39), SUM(ResolutionWidth + 40), SUM(ResolutionWidth + 41), SUM(ResolutionWidth + 42), SUM(ResolutionWidth + 43), SUM(ResolutionWidth + 44), SUM(ResolutionWidth + 45), SUM(ResolutionWidth + 46), SUM(ResolutionWidth + 47), SUM(ResolutionWidth + 48), SUM(ResolutionWidth + 49), SUM(ResolutionWidth + 50), SUM(ResolutionWidth + 51), SUM(ResolutionWidth + 52), SUM(ResolutionWidth + 53), SUM(ResolutionWidth + 54), SUM(ResolutionWidth + 55), SUM(ResolutionWidth + 56), SUM(ResolutionWidth + 57), SUM(ResolutionWidth + 58), SUM(ResolutionWidth + 59), SUM(ResolutionWidth + 60), SUM(ResolutionWidth + 61), 
SUM(ResolutionWidth + 62), SUM(ResolutionWidth + 63), SUM(ResolutionWidth + 64), SUM(ResolutionWidth + 65), SUM(ResolutionWidth + 66), SUM(ResolutionWidth + 67), SUM(ResolutionWidth + 68), SUM(ResolutionWidth + 69), SUM(ResolutionWidth + 70), SUM(ResolutionWidth + 71), SUM(ResolutionWidth + 72), SUM(ResolutionWidth + 73), SUM(ResolutionWidth + 74), SUM(ResolutionWidth + 75), SUM(ResolutionWidth + 76), SUM(ResolutionWidth + 77), SUM(ResolutionWidth + 78), SUM(ResolutionWidth + 79), SUM(ResolutionWidth + 80), SUM(ResolutionWidth + 81), SUM(ResolutionWidth + 82), SUM(ResolutionWidth + 83), SUM(ResolutionWidth + 84), SUM(ResolutionWidth + 85), SUM(ResolutionWidth + 86), SUM(ResolutionWidth + 87), SUM(ResolutionWidth + 88), SUM(ResolutionWidth + 89) FROM hits""" - qt_ckbench_shape_30 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query31.groovy b/regression-test/suites/new_shapes_p0/clickbench/query31.groovy deleted file mode 100644 index 4e0dab037fb90e..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query31.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT SearchEngineID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY SearchEngineID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_31 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query32.groovy b/regression-test/suites/new_shapes_p0/clickbench/query32.groovy deleted file mode 100644 index a4fc0f66d2d59c..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query32.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits WHERE SearchPhrase <> '' GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_32 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query33.groovy b/regression-test/suites/new_shapes_p0/clickbench/query33.groovy deleted file mode 100644 index 31d89c03410dcb..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query33.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query33") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WatchID, ClientIP, COUNT(*) AS c, SUM(IsRefresh), AVG(ResolutionWidth) FROM hits GROUP BY WatchID, ClientIP ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_33 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query34.groovy b/regression-test/suites/new_shapes_p0/clickbench/query34.groovy deleted file mode 100644 index 144dbd8579ef35..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query34.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS c FROM hits GROUP BY URL ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_34 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query35.groovy b/regression-test/suites/new_shapes_p0/clickbench/query35.groovy deleted file mode 100644 index 14b91739e89a5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query35.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT 1, URL, COUNT(*) AS c FROM hits GROUP BY 1, URL ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_35 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query36.groovy b/regression-test/suites/new_shapes_p0/clickbench/query36.groovy deleted file mode 100644 index 4067c45ff16b75..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query36.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3, COUNT(*) AS c FROM hits GROUP BY ClientIP, ClientIP - 1, ClientIP - 2, ClientIP - 3 ORDER BY c DESC LIMIT 10""" - qt_ckbench_shape_36 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query37.groovy b/regression-test/suites/new_shapes_p0/clickbench/query37.groovy deleted file mode 100644 index c1b3882e96eb00..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query37.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query37") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND URL <> '' GROUP BY URL ORDER BY PageViews DESC LIMIT 10""" - qt_ckbench_shape_37 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query38.groovy b/regression-test/suites/new_shapes_p0/clickbench/query38.groovy deleted file mode 100644 index da44d88d84595a..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query38.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT Title, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND DontCountHits = 0 AND IsRefresh = 0 AND Title <> '' GROUP BY Title ORDER BY PageViews DESC LIMIT 10""" - qt_ckbench_shape_38 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query39.groovy b/regression-test/suites/new_shapes_p0/clickbench/query39.groovy deleted file mode 100644 index 9aaf41f0303988..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query39.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query39") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URL, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND IsLink <> 0 AND IsDownload = 0 GROUP BY URL ORDER BY PageViews DESC LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_39 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query4.groovy b/regression-test/suites/new_shapes_p0/clickbench/query4.groovy deleted file mode 100644 index 5f2727592daee5..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query4.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT AVG(UserID) FROM hits""" - qt_ckbench_shape_4 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query40.groovy b/regression-test/suites/new_shapes_p0/clickbench/query40.groovy deleted file mode 100644 index a256bd9d40dbea..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query40.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT TraficSourceID, SearchEngineID, AdvEngineID, CASE WHEN (SearchEngineID = 0 AND AdvEngineID = 0) THEN Referer ELSE '' END AS Src, URL AS Dst, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 GROUP BY TraficSourceID, SearchEngineID, AdvEngineID, Src, Dst ORDER BY PageViews DESC LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_40 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query41.groovy b/regression-test/suites/new_shapes_p0/clickbench/query41.groovy deleted file mode 100644 index e0617d1e5d6a32..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query41.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query41") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT URLHash, EventDate, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND TraficSourceID IN (-1, 6) AND RefererHash = 3594120000172545465 GROUP BY URLHash, EventDate ORDER BY PageViews DESC LIMIT 10 OFFSET 100""" - qt_ckbench_shape_41 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query42.groovy b/regression-test/suites/new_shapes_p0/clickbench/query42.groovy deleted file mode 100644 index cd53a2a7d4d030..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query42.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query42") { - if (isCloudMode()) { - return - } - - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT WindowClientWidth, WindowClientHeight, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-01' AND EventDate <= '2013-07-31' AND IsRefresh = 0 AND DontCountHits = 0 AND URLHash = 2868770270353813622 GROUP BY WindowClientWidth, WindowClientHeight ORDER BY PageViews DESC LIMIT 10 OFFSET 10000""" - qt_ckbench_shape_42 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query43.groovy b/regression-test/suites/new_shapes_p0/clickbench/query43.groovy deleted file mode 100644 index 8de968093369a9..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query43.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') AS M, COUNT(*) AS PageViews FROM hits WHERE CounterID = 62 AND EventDate >= '2013-07-14' AND EventDate <= '2013-07-15' AND IsRefresh = 0 AND DontCountHits = 0 GROUP BY DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') ORDER BY DATE_FORMAT(EventTime, '%Y-%m-%d %H:%i:00') LIMIT 10 OFFSET 1000""" - qt_ckbench_shape_43 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query5.groovy b/regression-test/suites/new_shapes_p0/clickbench/query5.groovy deleted file mode 100644 index cf727f94101dee..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query5.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(DISTINCT UserID) FROM hits""" - qt_ckbench_shape_5 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query6.groovy b/regression-test/suites/new_shapes_p0/clickbench/query6.groovy deleted file mode 100644 index 3ec9bbbe25b2d3..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query6.groovy +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT COUNT(DISTINCT SearchPhrase) FROM hits""" - qt_ckbench_shape_6 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query7.groovy b/regression-test/suites/new_shapes_p0/clickbench/query7.groovy deleted file mode 100644 index 1c9d80a33709f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query7.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT MIN(EventDate), MAX(EventDate) FROM hits""" - qt_ckbench_shape_7 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query8.groovy b/regression-test/suites/new_shapes_p0/clickbench/query8.groovy deleted file mode 100644 index 36c1322ff5a7ef..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query8.groovy +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT AdvEngineID, COUNT(*) FROM hits WHERE AdvEngineID <> 0 GROUP BY AdvEngineID ORDER BY COUNT(*) DESC""" - qt_ckbench_shape_8 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/clickbench/query9.groovy b/regression-test/suites/new_shapes_p0/clickbench/query9.groovy deleted file mode 100644 index 91a8184de49c97..00000000000000 --- a/regression-test/suites/new_shapes_p0/clickbench/query9.groovy +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set topn_opt_limit_threshold = 1024' - def ckBench = """SELECT RegionID, COUNT(DISTINCT UserID) AS u FROM hits GROUP BY RegionID ORDER BY u DESC LIMIT 10 -""" - qt_ckbench_shape_9 """ - explain shape plan - ${ckBench} - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py deleted file mode 100644 index 8317bd1859f261..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf1000/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl deleted file mode 100644 index c25fd3f36b03f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/ddl/shape.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - def ds = """{query}""" - qt_ds_shape_{--} ''' - explain shape plan - {query} - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy deleted file mode 100644 index b2afbd31d7b5a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/load.groovy +++ /dev/null @@ -1,812 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy deleted file mode 100644 index d43c8dfb357846..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query1.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 ''' - explain shape plan - with customer_total_return as -(select -/*+ leading(store_returns broadcast date_dim) */ -sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select - c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and 
ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy deleted file mode 100644 index 01b4beedbdf434..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query24.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 ''' - explain shape plan - with ssales as -(select -/*+ leading(store_sales broadcast store shuffle {customer shuffle customer_address} shuffle item shuffle store_returns) */ -c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - 
,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy deleted file mode 100644 index 950e9416d2c47e..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query64.groovy +++ /dev/null @@ -1,284 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - 
,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 ''' - explain shape plan - with cs_ui as - (select - /*+ leading(catalog_sales shuffle 
catalog_returns) */ - cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select - /*+ leading( {store_sales {{customer d2} cd2}} cd1 d3 item {hd1 ib1} store_returns ad1 hd2 ad2 ib2 d1 store promotion cs_ui) */ - i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - 
hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select -/*+ leading(cs1 shuffle cs2) */ - cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy deleted file mode 100644 index ce258f814d8047..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query67.groovy +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 ''' - explain shape plan 
- select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select - /*+ leading(store_sales broadcast date_dim broadcast store broadcast item) */ - i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy deleted file mode 100644 index 76efe58c0ad620..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query72.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - 
qt_ds_shape_72 ''' - explain shape plan - select - /*+ leading( inventory shuffle { catalog_returns shuffle {catalog_sales shuffle {d3 broadcast d1} broadcast household_demographics shuffle customer_demographics broadcast promotion shuffle item} broadcast d2} broadcast warehouse) */ - i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy b/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy deleted file mode 100644 index a96d26a95caf78..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpcds/shape/query78.groovy +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on 
cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 ''' - explain shape plan - with ws as - (select - /*+ leading(web_sales broadcast date_dim web_returns) */ - d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select - /*+ 
leading(catalog_sales broadcast date_dim catalog_returns) */ - d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select - /*+ leading(store_sales broadcast date_dim store_returns) */ - d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -/*+ leading(ss shuffle ws shuffle cs) */ -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy deleted file mode 100644 index 
fc89f02d977e9c..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/load.groovy +++ /dev/null @@ -1,226 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ - drop table if exists lineitem; - """ - sql """ - CREATE TABLE lineitem ( - l_shipdate DATEV2 NOT NULL, - l_orderkey bigint NOT NULL, - l_linenumber int not null, - l_partkey int NOT NULL, - l_suppkey int not null, - l_quantity decimal(15, 2) NOT NULL, - l_extendedprice decimal(15, 2) NOT NULL, - l_discount decimal(15, 2) NOT NULL, - l_tax decimal(15, 2) NOT NULL, - l_returnflag VARCHAR(1) NOT NULL, - l_linestatus VARCHAR(1) NOT NULL, - l_commitdate DATEV2 NOT NULL, - l_receiptdate DATEV2 NOT NULL, - l_shipinstruct VARCHAR(25) NOT NULL, - l_shipmode VARCHAR(10) NOT NULL, - l_comment VARCHAR(44) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`l_shipdate`, `l_orderkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); - """ 
- - sql """ - drop table if exists orders; - """ - - sql ''' - CREATE TABLE orders ( - o_orderkey bigint NOT NULL, - o_orderdate DATEV2 NOT NULL, - o_custkey int NOT NULL, - o_orderstatus VARCHAR(1) NOT NULL, - o_totalprice decimal(15, 2) NOT NULL, - o_orderpriority VARCHAR(15) NOT NULL, - o_clerk VARCHAR(15) NOT NULL, - o_shippriority int NOT NULL, - o_comment VARCHAR(79) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`o_orderkey`, `o_orderdate`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`o_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); ''' - - sql ''' - drop table if exists partsupp; - ''' - - sql ''' - CREATE TABLE partsupp ( - ps_partkey int NOT NULL, - ps_suppkey int NOT NULL, - ps_availqty int NOT NULL, - ps_supplycost decimal(15, 2) NOT NULL, - ps_comment VARCHAR(199) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`ps_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`ps_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists part; - ''' - - sql ''' - CREATE TABLE part ( - p_partkey int NOT NULL, - p_name VARCHAR(55) NOT NULL, - p_mfgr VARCHAR(25) NOT NULL, - p_brand VARCHAR(10) NOT NULL, - p_type VARCHAR(25) NOT NULL, - p_size int NOT NULL, - p_container VARCHAR(10) NOT NULL, - p_retailprice decimal(15, 2) NOT NULL, - p_comment VARCHAR(23) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`p_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists customer; - ''' - - sql ''' - CREATE TABLE customer ( - c_custkey int NOT NULL, - c_name VARCHAR(25) NOT NULL, - c_address VARCHAR(40) NOT NULL, - c_nationkey int NOT NULL, - c_phone VARCHAR(15) NOT NULL, - c_acctbal decimal(15, 2) NOT NULL, - c_mktsegment VARCHAR(10) NOT NULL, - c_comment VARCHAR(117) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`c_custkey`) - 
COMMENT "OLAP" - DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists supplier - ''' - - sql ''' - CREATE TABLE supplier ( - s_suppkey int NOT NULL, - s_name VARCHAR(25) NOT NULL, - s_address VARCHAR(40) NOT NULL, - s_nationkey int NOT NULL, - s_phone VARCHAR(15) NOT NULL, - s_acctbal decimal(15, 2) NOT NULL, - s_comment VARCHAR(101) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`s_suppkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists nation; - ''' - - sql ''' - CREATE TABLE `nation` ( - `n_nationkey` int(11) NOT NULL, - `n_name` varchar(25) NOT NULL, - `n_regionkey` int(11) NOT NULL, - `n_comment` varchar(152) NULL - ) ENGINE=OLAP - DUPLICATE KEY(`N_NATIONKEY`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`N_NATIONKEY`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists region; - ''' - - sql ''' - CREATE TABLE region ( - r_regionkey int NOT NULL, - r_name VARCHAR(25) NOT NULL, - r_comment VARCHAR(152) - )ENGINE=OLAP - DUPLICATE KEY(`r_regionkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`r_regionkey`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop view if exists revenue0; - ''' - - sql ''' - create view revenue0 (supplier_no, total_revenue) as - select - l_suppkey, - sum(l_extendedprice * (1 - l_discount)) - from - lineitem - where - l_shipdate >= date '1996-01-01' - and l_shipdate < date '1996-01-01' + interval '3' month - group by - l_suppkey; - ''' -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy deleted file mode 100644 index 9322e2485a0221..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q10.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under 
one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem shuffle {{customer shuffle orders} broadcast nation}) */ - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy deleted file mode 100644 index 1581c654ecdc73..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q11.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - - - qt_select """ - explain shape plan - select - /*+ leading(partsupp {supplier nation}) */ - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - /*+ leading(partsupp {supplier nation}) */ - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy deleted file mode 100644 index c55966a2bfb546..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(orders lineitem) */ - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy deleted file mode 100644 index 54a83a15265010..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q13.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation 
(ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(orders shuffle customer) */ - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy deleted file mode 100644 index d1a05921a7ddd7..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q14.groovy +++ 
/dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(part lineitem) */ - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy deleted file mode 100644 index 70bc802e3220a7..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading(supplier revenue0) */ - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy 
deleted file mode 100644 index aa595d59bce9ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q17.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem broadcast part) */ - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy deleted file mode 100644 index ce166235d63322..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q19.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem broadcast part) */ - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy deleted file mode 100644 index 543193d069821d..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set runtime_filter_mode=OFF' - - - sql 'set be_number_for_test=3' - - - qt_select """ - explain shape plan - select - /*+ leading(lineitem {orders shuffle customer}) */ - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy deleted file mode 100644 index 
fd004fe6981d4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q4.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set runtime_filter_mode=OFF' - - - - -sql 'set be_number_for_test=3' - - - qt_select """ - explain shape plan - select - /*+ leading(lineitem orders) */ - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy deleted file mode 100644 index 0e1527e34a44ad..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q5.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading(lineitem orders broadcast {supplier broadcast {nation broadcast region}} shuffle customer) */ - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy deleted file mode 100644 index 0f33f61c395502..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q7.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - /*+ leading( lineitem broadcast {supplier broadcast n1} {orders shuffle {customer broadcast n2}}) */ - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy 
b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy deleted file mode 100644 index e70333dd1b051b..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q8.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - - qt_select """ - explain shape plan - select - /*+ leading( supplier { orders {lineitem broadcast part} {customer broadcast {n1 broadcast region}}} broadcast n2) */ - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy b/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy deleted file mode 100644 index d1fd50b47c56b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/hint_tpch/shape/q9.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql 'set runtime_filter_mode=OFF' - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - -sql 'set be_number_for_test=3' - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - /*+ leading(orders shuffle {lineitem shuffle part} shuffle {supplier broadcast nation} shuffle partsupp) */ - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy deleted file mode 100644 index 2544b972878259..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/load.groovy +++ /dev/null @@ -1,218 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ -CREATE TABLE IF NOT EXISTS `lineorder` ( - `lo_orderkey` int(11) NOT NULL COMMENT '', - `lo_linenumber` int(11) NOT NULL COMMENT '', - `lo_custkey` int(11) NOT NULL COMMENT '', - `lo_partkey` int(11) NOT NULL COMMENT '', - `lo_suppkey` int(11) NOT NULL COMMENT '', - `lo_orderdate` int(11) NOT NULL COMMENT '', - `lo_orderpriority` varchar(16) NOT NULL COMMENT '', - `lo_shippriority` int(11) NOT NULL COMMENT '', - `lo_quantity` int(11) NOT NULL COMMENT '', - `lo_extendedprice` int(11) NOT NULL COMMENT '', - `lo_ordtotalprice` int(11) NOT NULL COMMENT '', - `lo_discount` int(11) NOT NULL COMMENT '', - `lo_revenue` int(11) NOT NULL COMMENT '', - `lo_supplycost` int(11) NOT NULL COMMENT '', - `lo_tax` int(11) NOT NULL COMMENT '', - `lo_commitdate` int(11) NOT NULL COMMENT '', - `lo_shipmode` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`lo_orderkey`) -COMMENT "OLAP" -PARTITION BY RANGE(`lo_orderdate`) -(PARTITION p1 VALUES [("-2147483648"), ("19930101")), -PARTITION p2 VALUES [("19930101"), ("19940101")), -PARTITION p3 VALUES [("19940101"), ("19950101")), -PARTITION p4 VALUES [("19950101"), ("19960101")), -PARTITION p5 VALUES [("19960101"), ("19970101")), -PARTITION p6 VALUES [("19970101"), ("19980101")), -PARTITION p7 VALUES [("19980101"), ("19990101"))) -DISTRIBUTED BY HASH(`lo_orderkey`) BUCKETS 48 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa1", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `customer` ( - `c_custkey` int(11) NOT NULL COMMENT '', - `c_name` varchar(26) NOT NULL COMMENT '', - `c_address` varchar(41) NOT NULL COMMENT '', - `c_city` varchar(11) NOT NULL COMMENT '', - `c_nation` varchar(16) NOT NULL COMMENT '', - `c_region` 
varchar(13) NOT NULL COMMENT '', - `c_phone` varchar(16) NOT NULL COMMENT '', - `c_mktsegment` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`c_custkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa2", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `dates` ( - `d_datekey` int(11) NOT NULL COMMENT '', - `d_date` varchar(20) NOT NULL COMMENT '', - `d_dayofweek` varchar(10) NOT NULL COMMENT '', - `d_month` varchar(11) NOT NULL COMMENT '', - `d_year` int(11) NOT NULL COMMENT '', - `d_yearmonthnum` int(11) NOT NULL COMMENT '', - `d_yearmonth` varchar(9) NOT NULL COMMENT '', - `d_daynuminweek` int(11) NOT NULL COMMENT '', - `d_daynuminmonth` int(11) NOT NULL COMMENT '', - `d_daynuminyear` int(11) NOT NULL COMMENT '', - `d_monthnuminyear` int(11) NOT NULL COMMENT '', - `d_weeknuminyear` int(11) NOT NULL COMMENT '', - `d_sellingseason` varchar(14) NOT NULL COMMENT '', - `d_lastdayinweekfl` int(11) NOT NULL COMMENT '', - `d_lastdayinmonthfl` int(11) NOT NULL COMMENT '', - `d_holidayfl` int(11) NOT NULL COMMENT '', - `d_weekdayfl` int(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`d_datekey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`d_datekey`) BUCKETS 1 -PROPERTIES ( -"replication_num" = "1", -"in_memory" = "false", -"colocate_with" = "groupa3", -"storage_format" = "DEFAULT" -);""" - -sql """ - - CREATE TABLE IF NOT EXISTS `supplier` ( - `s_suppkey` int(11) NOT NULL COMMENT '', - `s_name` varchar(26) NOT NULL COMMENT '', - `s_address` varchar(26) NOT NULL COMMENT '', - `s_city` varchar(11) NOT NULL COMMENT '', - `s_nation` varchar(16) NOT NULL COMMENT '', - `s_region` varchar(13) NOT NULL COMMENT '', - `s_phone` varchar(16) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`s_suppkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa4", 
-"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """ -CREATE TABLE IF NOT EXISTS `part` ( - `p_partkey` int(11) NOT NULL COMMENT '', - `p_name` varchar(23) NOT NULL COMMENT '', - `p_mfgr` varchar(7) NOT NULL COMMENT '', - `p_category` varchar(8) NOT NULL COMMENT '', - `p_brand` varchar(10) NOT NULL COMMENT '', - `p_color` varchar(12) NOT NULL COMMENT '', - `p_type` varchar(26) NOT NULL COMMENT '', - `p_size` int(11) NOT NULL COMMENT '', - `p_container` varchar(11) NOT NULL COMMENT '' -) ENGINE=OLAP -DUPLICATE KEY(`p_partkey`) -COMMENT "OLAP" -DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 12 -PROPERTIES ( -"replication_num" = "1", -"colocate_with" = "groupa5", -"in_memory" = "false", -"storage_format" = "DEFAULT" -);""" - -sql """alter table dates modify column d_lastdayinweekfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table supplier modify column s_suppkey set stats ('row_count'='200000', 'ndv'='196099', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000', 'data_size'='800000');""" -sql """alter table lineorder modify column lo_quantity set stats ('row_count'='600037902', 'ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_shipmode set stats ('row_count'='600037902', 'ndv'='7', 'num_nulls'='0', 'min_value'='AIR', 'max_value'='TRUCK', 'data_size'='2571562204');""" -sql """alter table customer modify column c_name set stats ('row_count'='3000000', 'ndv'='3017713', 'num_nulls'='0', 'min_value'='Customer#000000001', 'max_value'='Customer#003000000', 'data_size'='54000000');""" -sql """alter table dates modify column d_date set stats ('row_count'='2556', 'ndv'='2539', 'num_nulls'='0', 'min_value'='April 1, 1992', 'max_value'='September 9, 1998', 'data_size'='38181');""" -sql """alter table dates modify column d_daynuminyear set stats ('row_count'='2556', 'ndv'='366', 
'num_nulls'='0', 'min_value'='1', 'max_value'='366', 'data_size'='10224');""" -sql """alter table dates modify column d_yearmonth set stats ('row_count'='2556', 'ndv'='84', 'num_nulls'='0', 'min_value'='Apr1992', 'max_value'='Sep1998', 'data_size'='17892');""" -sql """alter table part modify column p_mfgr set stats ('row_count'='1400000', 'ndv'='5', 'num_nulls'='0', 'min_value'='MFGR#1', 'max_value'='MFGR#5', 'data_size'='8400000');""" -sql """alter table part modify column p_name set stats ('row_count'='1400000', 'ndv'='8417', 'num_nulls'='0', 'min_value'='almond antique', 'max_value'='yellow white', 'data_size'='17705366');""" -sql """alter table lineorder modify column lo_extendedprice set stats ('row_count'='600037902', 'ndv'='1135983', 'num_nulls'='0', 'min_value'='90096', 'max_value'='10494950', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_linenumber set stats ('row_count'='600037902', 'ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_partkey set stats ('row_count'='600037902', 'ndv'='999528', 'num_nulls'='0', 'min_value'='1', 'max_value'='1000000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_shippriority set stats ('row_count'='600037902', 'ndv'='1', 'num_nulls'='0', 'min_value'='0', 'max_value'='0', 'data_size'='2400151608');""" -sql """alter table customer modify column c_mktsegment set stats ('row_count'='3000000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AUTOMOBILE', 'max_value'='MACHINERY', 'data_size'='26999329');""" -sql """alter table dates modify column d_dayofweek set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='18258');""" -sql """alter table dates modify column d_sellingseason set stats ('row_count'='2556', 'ndv'='5', 'num_nulls'='0', 'min_value'='Christmas', 'max_value'='Winter', 'data_size'='15760');""" -sql """alter table 
dates modify column d_weekdayfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table supplier modify column s_city set stats ('row_count'='200000', 'ndv'='250', 'num_nulls'='0', 'min_value'='ALGERIA 0', 'max_value'='VIETNAM 9', 'data_size'='2000000');""" -sql """alter table part modify column p_category set stats ('row_count'='1400000', 'ndv'='25', 'num_nulls'='0', 'min_value'='MFGR#11', 'max_value'='MFGR#55', 'data_size'='9800000');""" -sql """alter table part modify column p_size set stats ('row_count'='1400000', 'ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'data_size'='5600000');""" -sql """alter table part modify column p_type set stats ('row_count'='1400000', 'ndv'='150', 'num_nulls'='0', 'min_value'='ECONOMY ANODIZED BRASS', 'max_value'='STANDARD POLISHED TIN', 'data_size'='28837497');""" -sql """alter table lineorder modify column lo_orderkey set stats ('row_count'='600037902', 'ndv'='148064528', 'num_nulls'='0', 'min_value'='1', 'max_value'='600000000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_revenue set stats ('row_count'='600037902', 'ndv'='6280312', 'num_nulls'='0', 'min_value'='81087', 'max_value'='10494950', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_suppkey set stats ('row_count'='600037902', 'ndv'='196099', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_supplycost set stats ('row_count'='600037902', 'ndv'='15824', 'num_nulls'='0', 'min_value'='54057', 'max_value'='125939', 'data_size'='2400151608');""" -sql """alter table customer modify column c_address set stats ('row_count'='3000000', 'ndv'='3011483', 'num_nulls'='0', 'min_value'=' yaP00NZn4mxv', 'max_value'='zzzzsVRceYXRDisV3RC', 'data_size'='44994193');""" -sql """alter table dates modify column d_datekey set stats ('row_count'='2556', 
'ndv'='2560', 'num_nulls'='0', 'min_value'='19920101', 'max_value'='19981230', 'data_size'='10224');""" -sql """alter table dates modify column d_daynuminmonth set stats ('row_count'='2556', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='10224');""" -sql """alter table dates modify column d_year set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='1992', 'max_value'='1998', 'data_size'='10224');""" -sql """alter table supplier modify column s_address set stats ('row_count'='200000', 'ndv'='197960', 'num_nulls'='0', 'min_value'=' 2MrUy', 'max_value'='zzzqXhTdKxT0RAR8yxbc', 'data_size'='2998285');""" -sql """alter table lineorder modify column lo_commitdate set stats ('row_count'='600037902', 'ndv'='2469', 'num_nulls'='0', 'min_value'='19920131', 'max_value'='19981031', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_tax set stats ('row_count'='600037902', 'ndv'='9', 'num_nulls'='0', 'min_value'='0', 'max_value'='8', 'data_size'='2400151608');""" -sql """alter table customer modify column c_city set stats ('row_count'='3000000', 'ndv'='250', 'num_nulls'='0', 'min_value'='ALGERIA 0', 'max_value'='VIETNAM 9', 'data_size'='30000000');""" -sql """alter table customer modify column c_custkey set stats ('row_count'='3000000', 'ndv'='2985828', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000000', 'data_size'='12000000');""" -sql """alter table dates modify column d_daynuminweek set stats ('row_count'='2556', 'ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'data_size'='10224');""" -sql """alter table dates modify column d_lastdayinmonthfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table dates modify column d_month set stats ('row_count'='2556', 'ndv'='12', 'num_nulls'='0', 'min_value'='April', 'max_value'='September', 'data_size'='15933');""" -sql """alter table dates modify column 
d_yearmonthnum set stats ('row_count'='2556', 'ndv'='84', 'num_nulls'='0', 'min_value'='199201', 'max_value'='199812', 'data_size'='10224');""" -sql """alter table supplier modify column s_phone set stats ('row_count'='200000', 'ndv'='199261', 'num_nulls'='0', 'min_value'='10-100-177-2350', 'max_value'='34-999-827-8511', 'data_size'='3000000');""" -sql """alter table part modify column p_partkey set stats ('row_count'='1400000', 'ndv'='1394881', 'num_nulls'='0', 'min_value'='1', 'max_value'='1400000', 'data_size'='5600000');""" -sql """alter table lineorder modify column lo_custkey set stats ('row_count'='600037902', 'ndv'='1962895', 'num_nulls'='0', 'min_value'='1', 'max_value'='2999999', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_orderdate set stats ('row_count'='600037902', 'ndv'='2408', 'num_nulls'='0', 'min_value'='19920101', 'max_value'='19980802', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_ordtotalprice set stats ('row_count'='600037902', 'ndv'='35026888', 'num_nulls'='0', 'min_value'='81806', 'max_value'='60690215', 'data_size'='2400151608');""" -sql """alter table customer modify column c_nation set stats ('row_count'='3000000', 'ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'data_size'='21248112');""" -sql """alter table customer modify column c_phone set stats ('row_count'='3000000', 'ndv'='3012496', 'num_nulls'='0', 'min_value'='10-100-106-1617', 'max_value'='34-999-998-5763', 'data_size'='45000000');""" -sql """alter table customer modify column c_region set stats ('row_count'='3000000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'data_size'='20398797');""" -sql """alter table dates modify column d_holidayfl set stats ('row_count'='2556', 'ndv'='2', 'num_nulls'='0', 'min_value'='0', 'max_value'='1', 'data_size'='10224');""" -sql """alter table dates modify column d_weeknuminyear set stats ('row_count'='2556', 'ndv'='53', 
'num_nulls'='0', 'min_value'='1', 'max_value'='53', 'data_size'='10224');""" -sql """alter table supplier modify column s_nation set stats ('row_count'='200000', 'ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'data_size'='1415335');""" -sql """alter table part modify column p_brand set stats ('row_count'='1400000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='MFGR#111', 'max_value'='MFGR#559', 'data_size'='12285135');""" -sql """alter table part modify column p_color set stats ('row_count'='1400000', 'ndv'='92', 'num_nulls'='0', 'min_value'='almond', 'max_value'='yellow', 'data_size'='8170588');""" -sql """alter table part modify column p_container set stats ('row_count'='1400000', 'ndv'='40', 'num_nulls'='0', 'min_value'='JUMBO BAG', 'max_value'='WRAP PKG', 'data_size'='10606696');""" -sql """alter table lineorder modify column lo_discount set stats ('row_count'='600037902', 'ndv'='11', 'num_nulls'='0', 'min_value'='0', 'max_value'='10', 'data_size'='2400151608');""" -sql """alter table lineorder modify column lo_orderpriority set stats ('row_count'='600037902', 'ndv'='5', 'num_nulls'='0', 'min_value'='1-URGENT', 'max_value'='5-LOW', 'data_size'='5040804567');""" -sql """alter table dates modify column d_monthnuminyear set stats ('row_count'='2556', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='10224');""" -sql """alter table supplier modify column s_name set stats ('row_count'='200000', 'ndv'='201596', 'num_nulls'='0', 'min_value'='Supplier#000000001', 'max_value'='Supplier#000200000', 'data_size'='3600000');""" -sql """alter table supplier modify column s_region set stats ('row_count'='200000', 'ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'data_size'='1360337');""" - - -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy deleted file mode 100644 index 
1ba75597477e43..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/flat.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql "set enable_parallel_result_sink=false;" - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - LO_ORDERDATE, - LO_ORDERKEY, - LO_LINENUMBER, - LO_CUSTKEY, - LO_PARTKEY, - LO_SUPPKEY, - LO_ORDERPRIORITY, - LO_SHIPPRIORITY, - LO_QUANTITY, - LO_EXTENDEDPRICE, - LO_ORDTOTALPRICE, - LO_DISCOUNT, - LO_REVENUE, - LO_SUPPLYCOST, - LO_TAX, - LO_COMMITDATE, - LO_SHIPMODE, - C_NAME, - C_ADDRESS, - C_CITY, - C_NATION, - C_REGION, - C_PHONE, - C_MKTSEGMENT, - S_NAME, - 
S_ADDRESS, - S_CITY, - S_NATION, - S_REGION, - S_PHONE, - P_NAME, - P_MFGR, - P_CATEGORY, - P_BRAND, - P_COLOR, - P_TYPE, - P_SIZE, - P_CONTAINER - FROM ( - SELECT - lo_orderkey, - lo_linenumber, - lo_custkey, - lo_partkey, - lo_suppkey, - lo_orderdate, - lo_orderpriority, - lo_shippriority, - lo_quantity, - lo_extendedprice, - lo_ordtotalprice, - lo_discount, - lo_revenue, - lo_supplycost, - lo_tax, - lo_commitdate, - lo_shipmode - FROM lineorder - ) l - INNER JOIN customer c - ON (c.c_custkey = l.lo_custkey) - INNER JOIN supplier s - ON (s.s_suppkey = l.lo_suppkey) - INNER JOIN part p - ON (p.p_partkey = l.lo_partkey); - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy deleted file mode 100644 index 42b34b901e9c0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.1.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_year = 1993 - AND lo_discount BETWEEN 1 AND 3 - AND lo_quantity < 25; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy deleted file mode 100644 index 5ce77e665c070d..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.2.groovy +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_yearmonth = 'Jan1994' - AND lo_discount BETWEEN 4 AND 6 - AND lo_quantity BETWEEN 26 AND 35; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy deleted file mode 100644 index 61155a26f647af..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q1.3.groovy +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - SUM(lo_extendedprice * lo_discount) AS REVENUE -FROM lineorder, dates -WHERE - lo_orderdate = d_datekey - AND d_weeknuminyear = 6 - AND d_year = 1994 - AND lo_discount BETWEEN 5 AND 7 - AND lo_quantity BETWEEN 26 AND 35; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy deleted file mode 100644 index 8d3232ec783355..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.1.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_category = 'MFGR#12' - AND s_region = 'AMERICA' -GROUP BY d_year, p_brand -ORDER BY p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy deleted file mode 100644 index dd614fecc779b5..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.2.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q2.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan -SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_brand BETWEEN 'MFGR#2221' AND 'MFGR#2228' - AND s_region = 'ASIA' -GROUP BY d_year, p_brand -ORDER BY d_year, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy deleted file mode 100644 index cf39d3d1047e8d..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q2.3.groovy +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q2.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan -SELECT SUM(lo_revenue), d_year, p_brand -FROM lineorder, dates, part, supplier -WHERE - lo_orderdate = d_datekey - AND lo_partkey = p_partkey - AND lo_suppkey = s_suppkey - AND p_brand = 'MFGR#2239' - AND s_region = 'EUROPE' -GROUP BY d_year, p_brand -ORDER BY d_year, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy deleted file mode 100644 index a5a7eadb3735e2..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.1.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_nation, - s_nation, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND c_region = 'ASIA' - AND s_region = 'ASIA' - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_nation, s_nation, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy deleted file mode 100644 index 4755927b522171..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.2.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor 
license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND c_nation = 'UNITED STATES' - AND s_nation = 'UNITED STATES' - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy deleted file mode 100644 index 7f349a8f0841fc..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.3.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND ( - c_city = 'UNITED KI1' - OR c_city = 'UNITED KI5' - ) - AND ( - s_city = 'UNITED KI1' - OR s_city = 'UNITED KI5' - ) - AND d_year >= 1992 - AND d_year <= 1997 -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; -""" -} diff --git 
a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy deleted file mode 100644 index ff34697a8521aa..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q3.4.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q3.4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - c_city, - s_city, - d_year, - SUM(lo_revenue) AS REVENUE -FROM customer, lineorder, supplier, dates -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_orderdate = d_datekey - AND ( - c_city = 'UNITED KI1' - OR c_city = 'UNITED KI5' - ) - AND ( - s_city = 'UNITED KI1' - OR s_city = 'UNITED KI5' - ) - AND d_yearmonth = 'Dec1997' -GROUP BY c_city, s_city, d_year -ORDER BY d_year ASC, REVENUE DESC; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy deleted file mode 100644 index 91d4bf499a1c39..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.1.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - c_nation, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND c_region = 'AMERICA' - AND s_region = 'AMERICA' - AND ( - p_mfgr = 'MFGR#1' - OR p_mfgr = 'MFGR#2' - ) -GROUP BY d_year, c_nation -ORDER BY d_year, c_nation; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy deleted file mode 100644 index da8b425a810a4b..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.2.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - s_nation, - p_category, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND c_region = 'AMERICA' - AND s_region = 'AMERICA' - AND ( - d_year = 1997 - OR d_year = 1998 - ) - AND ( - p_mfgr = 'MFGR#1' - OR p_mfgr = 'MFGR#2' - ) -GROUP BY d_year, s_nation, p_category -ORDER BY d_year, s_nation, p_category; - """ -} diff --git a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy b/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy deleted file mode 100644 index bb3f5de73837bd..00000000000000 --- a/regression-test/suites/new_shapes_p0/ssb_sf100/shape/q4.3.groovy +++ 
/dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4.3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - -sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' -sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - SELECT - d_year, - s_city, - p_brand, - SUM(lo_revenue - lo_supplycost) AS PROFIT -FROM dates, customer, supplier, part, lineorder -WHERE - lo_custkey = c_custkey - AND lo_suppkey = s_suppkey - AND lo_partkey = p_partkey - AND lo_orderdate = d_datekey - AND s_nation = 'UNITED STATES' - AND ( - d_year = 1997 - OR d_year = 1998 - ) - AND p_category = 'MFGR#14' -GROUP BY d_year, s_city, p_brand -ORDER BY d_year, s_city, p_brand; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy deleted file mode 100644 index 1ed3ebba10e9f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/load.groovy +++ /dev/null @@ -1,2552 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - alter table customer add constraint customer_pk primary key (c_customer_sk); - ''' - - sql ''' - alter table customer add constraint customer_uk unique (c_customer_id); - ''' - - sql ''' - alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table item add constraint i_item_sk_pk primary key (i_item_sk) - ''' - - sql """ - alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='521779') - """ - - sql """ - alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 
'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2487705', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_city set stats ('row_count'='20', 'ndv'='12', 'num_nulls'='0', 'min_value'='Fairview', 'max_value'='Shiloh', 'data_size'='183') - """ - - sql """ - alter table warehouse modify column w_street_type set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='71') - """ - - sql """ - alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='1439980416', 'ndv'='42', 'num_nulls'='7199711', 'min_value'='1', 'max_value'='42', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='1439980416', 'ndv'='2505826', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='43956.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_sales_price set stats ('row_count'='1439980416', 'ndv'='29306', 'num_nulls'='7200276', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_class set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='large', 'max_value'='small', 'data_size'='226') - """ - - sql """ - alter table call_center modify column cc_country set stats ('row_count'='42', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='546') - """ - - sql """ - alter table call_center modify column cc_county set stats ('row_count'='42', 'ndv'='16', 'num_nulls'='0', 'min_value'='Barrow County', 'max_value'='Williamson County', 'data_size'='627') - """ - - sql """ - alter table call_center modify column cc_mkt_class set stats ('row_count'='42', 'ndv'='36', 'num_nulls'='0', 
'min_value'='A bit narrow forms matter animals. Consist', 'max_value'='Yesterday new men can make moreov', 'data_size'='1465') - """ - - sql """ - alter table call_center modify column cc_sq_ft set stats ('row_count'='42', 'ndv'='31', 'num_nulls'='0', 'min_value'='-1890660328', 'max_value'='2122480316', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_state set stats ('row_count'='42', 'ndv'='14', 'num_nulls'='0', 'min_value'='FL', 'max_value'='WV', 'data_size'='84') - """ - - sql """ - alter table inventory modify column inv_warehouse_sk set stats ('row_count'='783000000', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2881609', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='143996756', 'ndv'='1107525', 'num_nulls'='2879192', 'min_value'='0.00', 'max_value'='26955.24', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2881314', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='143996756', 'ndv'='1544502', 'num_nulls'='2881886', 'min_value'='0.00', 'max_value'='30418.06', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2883215', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='0-500', 
'max_value'='Unknown', 'data_size'='54000') - """ - - sql """ - alter table customer_address modify column ca_address_id set stats ('row_count'='6000000', 'ndv'='5984931', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPEAA', 'data_size'='96000000') - """ - - sql """ - alter table customer_address modify column ca_address_sk set stats ('row_count'='6000000', 'ndv'='6015811', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000', 'data_size'='48000000') - """ - - sql """ - alter table customer_address modify column ca_country set stats ('row_count'='6000000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='75661794') - """ - - sql """ - alter table customer_address modify column ca_location_type set stats ('row_count'='6000000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='single family', 'data_size'='52372545') - """ - - sql """ - alter table customer_address modify column ca_street_number set stats ('row_count'='6000000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='16837336') - """ - - sql """ - alter table customer_address modify column ca_suite_number set stats ('row_count'='6000000', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='45911575') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='30000', 'ndv'='29953', 'num_nulls'='0', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPGAAAA', 'data_size'='480000') - """ - - sql """ - alter table item modify column i_rec_end_date set stats ('row_count'='300000', 'ndv'='3', 'num_nulls'='150000', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239971', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table 
web_returns modify column wr_reversed_charge set stats ('row_count'='71997522', 'ndv'='692680', 'num_nulls'='3239546', 'min_value'='0.00', 'max_value'='23194.77', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_state set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='0', 'min_value'='AL', 'max_value'='WV', 'data_size'='108') - """ - - sql """ - alter table promotion modify column p_end_date_sk set stats ('row_count'='1500', 'ndv'='683', 'num_nulls'='18', 'min_value'='2450113', 'max_value'='2450967', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='180139', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='720000376', 'ndv'='567477', 'num_nulls'='180084', 'min_value'='0.00', 'max_value'='14950.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='720000376', 'ndv'='6015811', 'num_nulls'='179848', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180017', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180105', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_company_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_gmt_offset set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='6', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='4008') - """ - - sql """ - 
alter table store modify column s_manager set stats ('row_count'='1002', 'ndv'='739', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Clifton', 'data_size'='12649') - """ - - sql """ - alter table store modify column s_street_number set stats ('row_count'='1002', 'ndv'='521', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='2874') - """ - - sql """ - alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='lunch', 'data_size'='248400') - """ - - sql """ - alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_creation_date_sk set stats ('row_count'='3000', 'ndv'='199', 'num_nulls'='33', 'min_value'='2450604', 'max_value'='2450815', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_customer_sk set stats ('row_count'='3000', 'ndv'='713', 'num_nulls'='2147', 'min_value'='9522', 'max_value'='11995685', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_max_ad_count set stats ('row_count'='3000', 'ndv'='5', 'num_nulls'='31', 'min_value'='0', 'max_value'='4', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_url set stats ('row_count'='3000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='http://www.foo.com', 'data_size'='53406') - """ - - sql """ - alter table store_returns modify column sr_refunded_cash set stats ('row_count'='287999764', 'ndv'='928470', 'num_nulls'='10081294', 'min_value'='0.00', 'max_value'='18173.96', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_tax set stats ('row_count'='287999764', 'ndv'='117247', 'num_nulls'='10081332', 'min_value'='0.00', 'max_value'='1682.04', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify 
column ss_customer_sk set stats ('row_count'='2879987999', 'ndv'='12157481', 'num_nulls'='129590766', 'min_value'='1', 'max_value'='12000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='2879987999', 'ndv'='7251', 'num_nulls'='129594559', 'min_value'='1', 'max_value'='7200', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_store_sk set stats ('row_count'='2879987999', 'ndv'='499', 'num_nulls'='129572050', 'min_value'='1', 'max_value'='1000', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table customer modify column c_first_name set stats ('row_count'='12000000', 'ndv'='5140', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma', 'data_size'='67593278') - """ - - sql """ - alter table customer modify column c_first_sales_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='419856', 'min_value'='2448998', 'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='420769', 'min_value'='2449028', 'max_value'='2452678', 'data_size'='96000000') - """ - - sql """ - alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 
'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_street_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Elm', 'data_size'='176') - """ - - sql """ - alter table warehouse modify column w_suite_number set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite X', 'data_size'='150') - """ - - sql """ - alter table catalog_sales modify column cs_bill_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7202134', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7198837', 'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='1439980416', 'ndv'='573238', 'num_nulls'='7202537', 'min_value'='0.00', 'max_value'='14994.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='California', 'max_value'='Pacific Northwest_2', 'data_size'='572') - """ - - sql """ - alter table call_center modify column cc_street_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='1st', 'max_value'='Willow', 
'data_size'='356') - """ - - sql """ - alter table call_center modify column cc_zip set stats ('row_count'='42', 'ndv'='19', 'num_nulls'='0', 'min_value'='18605', 'max_value'='98048', 'data_size'='210') - """ - - sql """ - alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='783000000', 'ndv'='1006', 'num_nulls'='39153758', 'min_value'='0', 'max_value'='1000', 'data_size'='3132000000') - """ - - sql """ - alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='143996756', 'ndv'='17005', 'num_nulls'='2882502', 'min_value'='1', 'max_value'='25207', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='57600') - """ - - sql """ - alter table catalog_page modify column cp_description set stats ('row_count'='30000', 'ndv'='30141', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters worry both workers. Fascinating characters take cheap never alive studies. 
Direct, old', 'data_size'='2215634') - """ - - sql """ - alter table item modify column i_item_id set stats ('row_count'='300000', 'ndv'='150851', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'data_size'='4800000') - """ - - sql """ - alter table web_returns modify column wr_account_credit set stats ('row_count'='71997522', 'ndv'='683955', 'num_nulls'='3241972', 'min_value'='0.00', 'max_value'='23166.33', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_net_loss set stats ('row_count'='71997522', 'ndv'='815608', 'num_nulls'='3240573', 'min_value'='0.50', 'max_value'='15887.84', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt set stats ('row_count'='71997522', 'ndv'='808311', 'num_nulls'='3238405', 'min_value'='0.00', 'max_value'='29191.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='71997522', 'ndv'='1359913', 'num_nulls'='3239765', 'min_value'='0.00', 'max_value'='30393.01', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_quantity set stats ('row_count'='71997522', 'ndv'='100', 'num_nulls'='3238643', 'min_value'='1', 'max_value'='100', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239658', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='71997522', 'ndv'='12119220', 'num_nulls'='3237281', 'min_value'='1', 'max_value'='12000000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_mkt_desc set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Acres see else children. Mutual too', 'max_value'='Windows increase to a differences. 
Other parties might in', 'data_size'='3473') - """ - - sql """ - alter table web_site modify column web_mkt_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='1', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_rec_end_date set stats ('row_count'='54', 'ndv'='3', 'num_nulls'='27', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_site_id set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='864') - """ - - sql """ - alter table web_site modify column web_street_type set stats ('row_count'='54', 'ndv'='20', 'num_nulls'='0', 'min_value'='Ave', 'max_value'='Wy', 'data_size'='208') - """ - - sql """ - alter table promotion modify column p_channel_demo set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_channel_details set stats ('row_count'='1500', 'ndv'='1490', 'num_nulls'='0', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'data_size'='59126') - """ - - sql """ - alter table promotion modify column p_channel_event set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_discount_active set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1473') - """ - - sql """ - alter table promotion modify column p_promo_sk set stats ('row_count'='1500', 'ndv'='1489', 'num_nulls'='0', 'min_value'='1', 'max_value'='1500', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_purpose set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='10374') - """ - - sql """ - alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='179788', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='720000376', 'ndv'='1820', 'num_nulls'='179921', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_site_sk set stats ('row_count'='720000376', 'ndv'='54', 'num_nulls'='179930', 'min_value'='1', 'max_value'='54', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_city set stats ('row_count'='1002', 'ndv'='55', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='9238') - """ - - sql """ - alter table store modify column s_company_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='7', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_county set stats ('row_count'='1002', 'ndv'='28', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='14291') - """ - - sql """ - alter table store 
modify column s_geography_class set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6972') - """ - - sql """ - alter table store modify column s_hours set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='8AM-8AM', 'data_size'='7088') - """ - - sql """ - alter table store modify column s_store_id set stats ('row_count'='1002', 'ndv'='501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPBAAAAA', 'data_size'='16032') - """ - - sql """ - alter table store modify column s_zip set stats ('row_count'='1002', 'ndv'='354', 'num_nulls'='0', 'min_value'='', 'max_value'='99454', 'data_size'='4975') - """ - - sql """ - alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'num_nulls'='0', 'min_value'='AM', 'max_value'='PM', 'data_size'='172800') - """ - - sql """ - alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_web_page_id set stats ('row_count'='3000', 'ndv'='1501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPKAAAAA', 'data_size'='48000') - """ - - sql """ - alter table web_page modify column wp_web_page_sk set stats ('row_count'='3000', 'ndv'='2984', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000', 'data_size'='24000') - """ - - sql """ - alter table store_returns modify column sr_return_amt set stats ('row_count'='287999764', 'ndv'='671228', 'num_nulls'='10080055', 'min_value'='0.00', 'max_value'='19434.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='287999764', 'ndv'='2010', 'num_nulls'='10079607', 'min_value'='2450820', 'max_value'='2452822', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_tax 
set stats ('row_count'='2879987999', 'ndv'='149597', 'num_nulls'='129588732', 'min_value'='0.00', 'max_value'='1797.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_current_cdemo_sk set stats ('row_count'='12000000', 'ndv'='1913901', 'num_nulls'='419895', 'min_value'='1', 'max_value'='1920800', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_customer_id set stats ('row_count'='12000000', 'ndv'='11921032', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPKAA', 'data_size'='192000000') - """ - - sql """ - alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_gmt_offset set stats ('row_count'='20', 'ndv'='3', 'num_nulls'='1', 'min_value'='-7.00', 'max_value'='-5.00', 'data_size'='80') - """ - - sql """ - alter table warehouse modify column w_warehouse_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='1', 'min_value'='73065', 'max_value'='977787', 'data_size'='80') - """ - - sql """ - alter table catalog_sales modify column cs_ext_sales_price set stats 
('row_count'='1439980416', 'ndv'='1100662', 'num_nulls'='7199625', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='393180', 'num_nulls'='7199876', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_item_sk set stats ('row_count'='1439980416', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='1439980416', 'ndv'='2422238', 'num_nulls'='7200702', 'min_value'='0.00', 'max_value'='32376.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='1439980416', 'ndv'='1933', 'num_nulls'='7200707', 'min_value'='2450817', 'max_value'='2452744', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7200688', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_division set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_division_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='164') - """ - - sql """ - alter table call_center modify column cc_manager set stats ('row_count'='42', 'ndv'='28', 'num_nulls'='0', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'data_size'='519') - """ - - sql """ - alter table call_center modify column cc_rec_start_date set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 
'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='143996756', 'ndv'='42', 'num_nulls'='2881668', 'min_value'='1', 'max_value'='42', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_net_loss set stats ('row_count'='143996756', 'ndv'='911034', 'num_nulls'='2881704', 'min_value'='0.50', 'max_value'='16095.08', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='143996756', 'ndv'='12156363', 'num_nulls'='2879017', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882107', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='143996756', 'ndv'='12157481', 'num_nulls'='2879023', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table customer_address modify column ca_gmt_offset set stats ('row_count'='6000000', 'ndv'='6', 'num_nulls'='180219', 'min_value'='-10.00', 'max_value'='-5.00', 'data_size'='24000000') - """ - - sql """ - alter table item modify column i_color set stats ('row_count'='300000', 'ndv'='93', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow', 'data_size'='1610293') - """ - - sql """ - alter table item modify column i_manufact set stats ('row_count'='300000', 'ndv'='1004', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripri', 'data_size'='3379693') - """ - - sql """ - alter table item modify column i_product_name set stats ('row_count'='300000', 'ndv'='294994', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripripripriought', 'data_size'='6849199') - """ - - sql """ - alter table web_returns modify column wr_returned_time_sk 
set stats ('row_count'='71997522', 'ndv'='87677', 'num_nulls'='3238574', 'min_value'='0', 'max_value'='86399', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_manager set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='William Young', 'data_size'='658') - """ - - sql """ - alter table web_site modify column web_mkt_class set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='Written, political plans show to the models. T', 'data_size'='1822') - """ - - sql """ - alter table web_site modify column web_rec_start_date set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='2', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_street_number set stats ('row_count'='54', 'ndv'='36', 'num_nulls'='0', 'min_value'='', 'max_value'='983', 'data_size'='154') - """ - - sql """ - alter table promotion modify column p_channel_catalog set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_promo_id set stats ('row_count'='1500', 'ndv'='1519', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'data_size'='24000') - """ - - sql """ - alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='720000376', 'ndv'='12103729', 'num_nulls'='179817', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_list_price set stats ('row_count'='720000376', 'ndv'='29396', 'num_nulls'='180053', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_sales_price set stats ('row_count'='720000376', 'ndv'='29288', 'num_nulls'='180005', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - 
alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='179824', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_closed_date_sk set stats ('row_count'='1002', 'ndv'='163', 'num_nulls'='729', 'min_value'='2450820', 'max_value'='2451313', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_division_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='6', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_market_desc set stats ('row_count'='1002', 'ndv'='765', 'num_nulls'='0', 'min_value'='', 'max_value'='Yesterday left factors handle continuing co', 'data_size'='57638') - """ - - sql """ - alter table store modify column s_market_id set stats ('row_count'='1002', 'ndv'='10', 'num_nulls'='8', 'min_value'='1', 'max_value'='10', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_state set stats ('row_count'='1002', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='WV', 'data_size'='1994') - """ - - sql """ - alter table store modify column s_store_sk set stats ('row_count'='1002', 'ndv'='988', 'num_nulls'='0', 'min_value'='1', 'max_value'='1002', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_street_name set stats ('row_count'='1002', 'ndv'='549', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Oak', 'data_size'='8580') - """ - - sql """ - alter table web_page modify column wp_access_date_sk set stats ('row_count'='3000', 'ndv'='101', 'num_nulls'='31', 'min_value'='2452548', 'max_value'='2452648', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_char_count set stats ('row_count'='3000', 'ndv'='1883', 'num_nulls'='42', 'min_value'='303', 'max_value'='8523', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_addr_sk set stats 
('row_count'='287999764', 'ndv'='6015811', 'num_nulls'='10082311', 'min_value'='1', 'max_value'='6000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_return_time_sk set stats ('row_count'='287999764', 'ndv'='32660', 'num_nulls'='10082805', 'min_value'='28799', 'max_value'='61199', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_sk set stats ('row_count'='287999764', 'ndv'='499', 'num_nulls'='10081871', 'min_value'='1', 'max_value'='1000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_coupon_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sales_price set stats ('row_count'='2879987999', 'ndv'='19780', 'num_nulls'='129598061', 'min_value'='0.00', 'max_value'='200.00', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_country set stats ('row_count'='12000000', 'ndv'='211', 'num_nulls'='0', 'min_value'='', 'max_value'='ZIMBABWE', 'data_size'='100750845') - """ - - sql """ - alter table customer modify column c_birth_month set stats ('row_count'='12000000', 'ndv'='12', 'num_nulls'='419629', 'min_value'='1', 'max_value'='12', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_customer_sk set stats ('row_count'='12000000', 'ndv'='12157481', 'num_nulls'='0', 'min_value'='1', 'max_value'='12000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_email_address set stats ('row_count'='12000000', 'ndv'='11642077', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma.Young@aDhzZzCzYN.edu', 'data_size'='318077849') - """ - - sql """ - alter table customer modify column c_last_review_date_sk set stats ('row_count'='12000000', 'ndv'='366', 'num_nulls'='419900', 'min_value'='2452283', 
'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_preferred_cust_flag set stats ('row_count'='12000000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='11580510') - """ - - sql """ - alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='3.2.0', 'max_value'='3.2.0', 'data_size'='5') - """ - - sql """ - alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'num_nulls'='0', 'min_value'='500', 'max_value'='10000', 'data_size'='7683200') - """ - - sql """ - alter table reason modify column r_reason_id set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPDAAAAAA', 'data_size'='1040') - """ - - sql """ - alter table reason modify column r_reason_sk set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='1', 'max_value'='65', 'data_size'='520') - """ - - sql """ - alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2488070', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2488372', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'num_nulls'='0', 'min_value'='0', 'max_value'='2400', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='2100Q1', 'data_size'='438294') - """ - - sql """ - alter table warehouse modify column w_county set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='Bronx County', 'max_value'='Ziebach County', 'data_size'='291') - """ - - sql """ - alter table warehouse modify column w_street_number set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='0', 'min_value'='', 'max_value'='957', 'data_size'='54') - """ - - sql """ - alter table warehouse modify column w_warehouse_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Therefore urg', 'data_size'='307') - """ - - sql """ - alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='1439980416', 'ndv'='1100115', 'num_nulls'='7201054', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='1439980416', 'ndv'='3312360', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46593.36', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='1439980416', 'ndv'='1489', 'num_nulls'='7202844', 'min_value'='1', 'max_value'='1500', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_id set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='672') - """ - - sql """ - alter table call_center modify column cc_employees set stats ('row_count'='42', 'ndv'='30', 'num_nulls'='0', 'min_value'='69020', 'max_value'='6879074', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_suite_number set stats ('row_count'='42', 'ndv'='18', 'num_nulls'='0', 'min_value'='Suite 0', 
'max_value'='Suite W', 'data_size'='326') - """ - - sql """ - alter table catalog_returns modify column cr_item_sk set stats ('row_count'='143996756', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='143996756', 'ndv'='65', 'num_nulls'='2881950', 'min_value'='1', 'max_value'='65', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='143996756', 'ndv'='483467', 'num_nulls'='2883436', 'min_value'='0.00', 'max_value'='14273.28', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2879879', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_store_credit set stats ('row_count'='143996756', 'ndv'='802237', 'num_nulls'='2880469', 'min_value'='0.00', 'max_value'='23215.15', 'data_size'='575987024') - """ - - sql """ - alter table customer_address modify column ca_city set stats ('row_count'='6000000', 'ndv'='977', 'num_nulls'='0', 'min_value'='', 'max_value'='Zion', 'data_size'='52096290') - """ - - sql """ - alter table customer_address modify column ca_state set stats ('row_count'='6000000', 'ndv'='52', 'num_nulls'='0', 'min_value'='', 'max_value'='WY', 'data_size'='11640128') - """ - - sql """ - alter table customer_address modify column ca_street_name set stats ('row_count'='6000000', 'ndv'='8173', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Woodland', 'data_size'='50697257') - """ - - sql """ - alter table customer_address modify column ca_street_type set stats ('row_count'='6000000', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='24441630') - """ - - sql """ - alter table catalog_page modify column cp_catalog_number set 
stats ('row_count'='30000', 'ndv'='109', 'num_nulls'='297', 'min_value'='1', 'max_value'='109', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='30000', 'ndv'='279', 'num_nulls'='294', 'min_value'='1', 'max_value'='277', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='30000', 'ndv'='30439', 'num_nulls'='0', 'min_value'='1', 'max_value'='30000', 'data_size'='240000') - """ - - sql """ - alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='30000', 'ndv'='91', 'num_nulls'='286', 'min_value'='2450815', 'max_value'='2453005', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_rec_start_date set stats ('row_count'='300000', 'ndv'='4', 'num_nulls'='784', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_units set stats ('row_count'='300000', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='1253652') - """ - - sql """ - alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238545', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='71997522', 'ndv'='451263', 'num_nulls'='3239048', 'min_value'='0.00', 'max_value'='14352.10', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='71997522', 'ndv'='2188', 'num_nulls'='3239259', 'min_value'='2450819', 'max_value'='2453002', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3239192', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - 
""" - - sql """ - alter table web_site modify column web_suite_number set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Suite 100', 'max_value'='Suite Y', 'data_size'='430') - """ - - sql """ - alter table promotion modify column p_start_date_sk set stats ('row_count'='1500', 'ndv'='685', 'num_nulls'='23', 'min_value'='2450096', 'max_value'='2450915', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_coupon_amt set stats ('row_count'='720000376', 'ndv'='1505315', 'num_nulls'='179933', 'min_value'='0.00', 'max_value'='28824.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='720000376', 'ndv'='393180', 'num_nulls'='180060', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='720000376', 'ndv'='2414838', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='44263.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='720000376', 'ndv'='1952', 'num_nulls'='180011', 'min_value'='2450817', 'max_value'='2452762', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_page_sk set stats ('row_count'='720000376', 'ndv'='2984', 'num_nulls'='179732', 'min_value'='1', 'max_value'='3000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_country set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='12961') - """ - - sql """ - alter table store modify column s_store_name set stats ('row_count'='1002', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='3916') - """ - - sql """ - alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 
'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='afternoon', 'max_value'='night', 'data_size'='597600') - """ - - sql """ - alter table web_page modify column wp_image_count set stats ('row_count'='3000', 'ndv'='7', 'num_nulls'='26', 'min_value'='1', 'max_value'='7', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_type set stats ('row_count'='3000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='welcome', 'data_size'='18867') - """ - - sql """ - alter table store_returns modify column sr_customer_sk set stats ('row_count'='287999764', 'ndv'='12157481', 'num_nulls'='10081624', 'min_value'='1', 'max_value'='12000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='287999764', 'ndv'='7251', 'num_nulls'='10083275', 'min_value'='1', 'max_value'='7200', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_addr_sk set stats ('row_count'='2879987999', 'ndv'='6015811', 'num_nulls'='129589799', 'min_value'='1', 'max_value'='6000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_item_sk set stats ('row_count'='2879987999', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_quantity set stats ('row_count'='2879987999', 'ndv'='100', 'num_nulls'='129584258', 'min_value'='1', 'max_value'='100', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ticket_number set stats ('row_count'='2879987999', 'ndv'='238830448', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='9905', 
'num_nulls'='129590273', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='11519951996') - """ - - sql """ - alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'num_nulls'='0', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'data_size'='150') - """ - - sql """ - alter table customer modify column c_current_addr_sk set stats ('row_count'='12000000', 'ndv'='5243359', 'num_nulls'='0', 'min_value'='3', 'max_value'='6000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_last_name set stats ('row_count'='12000000', 'ndv'='4990', 'num_nulls'='0', 'min_value'='', 'max_value'='Zuniga', 'data_size'='70991730') - """ - - sql """ - alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'max_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'data_size'='86') - """ - - sql """ - alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2488070', 'data_size'='584392') - """ - - sql """ - alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_country set stats ('row_count'='20', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='260') - """ - - sql """ - alter table warehouse modify column w_state set stats ('row_count'='20', 'ndv'='13', 'num_nulls'='0', 'min_value'='AL', 'max_value'='TN', 'data_size'='40') - """ - - sql """ - alter 
table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7199539', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201919', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid set stats ('row_count'='1439980416', 'ndv'='1809875', 'num_nulls'='7197668', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7198232', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7201083', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='1439980416', 'ndv'='1835', 'num_nulls'='7203326', 'min_value'='2450815', 'max_value'='2452654', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='1439980416', 'ndv'='87677', 'num_nulls'='7201329', 'min_value'='0', 'max_value'='86399', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='9905', 'num_nulls'='7201098', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_company_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='160') - """ - - sql """ - alter table call_center modify column cc_market_manager set 
stats ('row_count'='42', 'ndv'='35', 'num_nulls'='0', 'min_value'='Cesar Allen', 'max_value'='William Larsen', 'data_size'='524') - """ - - sql """ - alter table call_center modify column cc_mkt_id set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_street_type set stats ('row_count'='42', 'ndv'='11', 'num_nulls'='0', 'min_value'='Avenue', 'max_value'='Way', 'data_size'='184') - """ - - sql """ - alter table catalog_returns modify column cr_return_tax set stats ('row_count'='143996756', 'ndv'='149828', 'num_nulls'='2881611', 'min_value'='0.00', 'max_value'='2511.58', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2880543', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882692', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='143996756', 'ndv'='802509', 'num_nulls'='2881215', 'min_value'='0.00', 'max_value'='24033.84', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2882192', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'num_nulls'='0', 'min_value'='1', 'max_value'='7200', 'data_size'='57600') - """ - - sql """ - alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='-1', 'max_value'='4', 'data_size'='28800') 
- """ - - sql """ - alter table customer_address modify column ca_zip set stats ('row_count'='6000000', 'ndv'='9253', 'num_nulls'='0', 'min_value'='', 'max_value'='99981', 'data_size'='29097610') - """ - - sql """ - alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table catalog_page modify column cp_type set stats ('row_count'='30000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='quarterly', 'data_size'='227890') - """ - - sql """ - alter table item modify column i_brand set stats ('row_count'='300000', 'ndv'='714', 'num_nulls'='0', 'min_value'='', 'max_value'='univunivamalg #9', 'data_size'='4834917') - """ - - sql """ - alter table item modify column i_formulation set stats ('row_count'='300000', 'ndv'='224757', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow98911509228741', 'data_size'='5984460') - """ - - sql """ - alter table item modify column i_item_desc set stats ('row_count'='300000', 'ndv'='217721', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'data_size'='30093342') - """ - - sql """ - alter table web_returns modify column wr_fee set stats ('row_count'='71997522', 'ndv'='9958', 'num_nulls'='3238926', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_item_sk set stats ('row_count'='71997522', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reason_sk set stats ('row_count'='71997522', 'ndv'='65', 'num_nulls'='3238897', 'min_value'='1', 'max_value'='65', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='71997522', 'ndv'='12117831', 'num_nulls'='3242433', 'min_value'='1', 'max_value'='12000000', 
'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_city set stats ('row_count'='54', 'ndv'='31', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='491') - """ - - sql """ - alter table web_site modify column web_close_date_sk set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='10', 'min_value'='2441265', 'max_value'='2446218', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_company_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_company_name set stats ('row_count'='54', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='203') - """ - - sql """ - alter table web_site modify column web_county set stats ('row_count'='54', 'ndv'='25', 'num_nulls'='0', 'min_value'='', 'max_value'='Williamson County', 'data_size'='762') - """ - - sql """ - alter table web_site modify column web_name set stats ('row_count'='54', 'ndv'='10', 'num_nulls'='0', 'min_value'='', 'max_value'='site_8', 'data_size'='312') - """ - - sql """ - alter table web_site modify column web_open_date_sk set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='1', 'min_value'='2450373', 'max_value'='2450807', 'data_size'='432') - """ - - sql """ - alter table promotion modify column p_channel_dmail set stats ('row_count'='1500', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='1483') - """ - - sql """ - alter table promotion modify column p_channel_press set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_channel_radio set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_cost set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='18', 'min_value'='1000.00', 'max_value'='1000.00', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_ext_tax set stats ('row_count'='720000376', 'ndv'='211413', 'num_nulls'='179695', 'min_value'='0.00', 'max_value'='2682.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_item_sk set stats ('row_count'='720000376', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_net_paid set stats ('row_count'='720000376', 'ndv'='1749360', 'num_nulls'='179970', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='720000376', 'ndv'='3224829', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46004.19', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='720000376', 'ndv'='2354996', 'num_nulls'='179972', 'min_value'='0.00', 'max_value'='32492.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_order_number set stats ('row_count'='720000376', 'ndv'='60401176', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_quantity set stats ('row_count'='720000376', 'ndv'='100', 'num_nulls'='179781', 'min_value'='1', 'max_value'='100', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='180290', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='720000376', 'ndv'='87677', 'num_nulls'='179980', 'min_value'='0', 'max_value'='86399', 'data_size'='5760003008') - """ - - sql 
""" - alter table store modify column s_street_type set stats ('row_count'='1002', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='4189') - """ - - sql """ - alter table web_page modify column wp_autogen_flag set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='2962') - """ - - sql """ - alter table web_page modify column wp_rec_start_date set stats ('row_count'='3000', 'ndv'='4', 'num_nulls'='29', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_net_loss set stats ('row_count'='287999764', 'ndv'='714210', 'num_nulls'='10080716', 'min_value'='0.50', 'max_value'='10776.08', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='287999764', 'ndv'='1259368', 'num_nulls'='10076879', 'min_value'='0.00', 'max_value'='20454.63', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_quantity set stats ('row_count'='287999764', 'ndv'='100', 'num_nulls'='10082815', 'min_value'='1', 'max_value'='100', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='287999764', 'ndv'='355844', 'num_nulls'='10081927', 'min_value'='0.00', 'max_value'='9767.34', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reversed_charge set stats ('row_count'='287999764', 'ndv'='700618', 'num_nulls'='10085976', 'min_value'='0.00', 'max_value'='17339.42', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='2879987999', 'ndv'='1681767', 'num_nulls'='129609050', 'min_value'='0.00', 'max_value'='21769.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_day set stats ('row_count'='12000000', 
'ndv'='31', 'num_nulls'='420361', 'min_value'='1', 'max_value'='31', 'data_size'='48000000') - """ - - sql """ - alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'num_nulls'='0', 'min_value'='Good', 'max_value'='Unknown', 'data_size'='13445600') - """ - - sql """ - alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'num_nulls'='0', 'min_value'='1', 'max_value'='1920800', 'data_size'='15366400') - """ - - sql """ - alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'data_size'='18384800') - """ - - sql """ - alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='M', 'data_size'='1920800') - """ - - sql """ - alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'num_nulls'='0', 'min_value'='D', 'max_value'='W', 'data_size'='1920800') - """ - - sql """ - alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='1168784') - """ - - sql """ - alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column 
w_warehouse_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='1439980416', 'ndv'='1160303', 'num_nulls'='7199542', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='1439980416', 'ndv'='215267', 'num_nulls'='7200412', 'min_value'='0.00', 'max_value'='2673.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_quantity set stats ('row_count'='1439980416', 'ndv'='100', 'num_nulls'='7202885', 'min_value'='1', 'max_value'='100', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7200151', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201507', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_company set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_mkt_desc set stats ('row_count'='42', 'ndv'='33', 'num_nulls'='0', 'min_value'='Arms increase controversial, present so', 'max_value'='Young tests could buy comfortable, local users; o', 'data_size'='2419') - """ - - sql """ - alter table call_center modify column cc_open_date_sk set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='2450794', 'max_value'='2451146', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_rec_end_date set stats ('row_count'='42', 
'ndv'='3', 'num_nulls'='21', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_order_number set stats ('row_count'='143996756', 'ndv'='93476424', 'num_nulls'='0', 'min_value'='2', 'max_value'='160000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amount set stats ('row_count'='143996756', 'ndv'='882831', 'num_nulls'='2880424', 'min_value'='0.00', 'max_value'='28805.04', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='143996756', 'ndv'='2108', 'num_nulls'='0', 'min_value'='2450821', 'max_value'='2452924', 'data_size'='1151974048') - """ - - sql """ - alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='10000', 'max_value'='200000', 'data_size'='80') - """ - - sql """ - alter table catalog_page modify column cp_department set stats ('row_count'='30000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='DEPARTMENT', 'data_size'='297110') - """ - - sql """ - alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='30000', 'ndv'='97', 'num_nulls'='302', 'min_value'='2450844', 'max_value'='2453186', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_brand_id set stats ('row_count'='300000', 'ndv'='951', 'num_nulls'='763', 'min_value'='1001001', 'max_value'='10016017', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_category set stats ('row_count'='300000', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='Women', 'data_size'='1766742') - """ - - sql """ - alter table item modify column i_class_id set stats ('row_count'='300000', 'ndv'='16', 'num_nulls'='722', 'min_value'='1', 'max_value'='16', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_item_sk set stats 
('row_count'='300000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2400000') - """ - - sql """ - alter table item modify column i_manufact_id set stats ('row_count'='300000', 'ndv'='1005', 'num_nulls'='761', 'min_value'='1', 'max_value'='1000', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_wholesale_cost set stats ('row_count'='300000', 'ndv'='7243', 'num_nulls'='740', 'min_value'='0.02', 'max_value'='89.49', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3240352', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_tax set stats ('row_count'='71997522', 'ndv'='137392', 'num_nulls'='3237729', 'min_value'='0.00', 'max_value'='2551.16', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238239', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_web_page_sk set stats ('row_count'='71997522', 'ndv'='2984', 'num_nulls'='3240387', 'min_value'='1', 'max_value'='3000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_class set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='371') - """ - - sql """ - alter table web_site modify column web_zip set stats ('row_count'='54', 'ndv'='32', 'num_nulls'='0', 'min_value'='14593', 'max_value'='99431', 'data_size'='270') - """ - - sql """ - alter table promotion modify column p_channel_email set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1480') - """ - - sql """ - alter table promotion modify column p_item_sk set stats 
('row_count'='1500', 'ndv'='1467', 'num_nulls'='19', 'min_value'='184', 'max_value'='299990', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_promo_name set stats ('row_count'='1500', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='5896') - """ - - sql """ - alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='720000376', 'ndv'='1093513', 'num_nulls'='179851', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_list_price set stats ('row_count'='720000376', 'ndv'='1160303', 'num_nulls'='179866', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='720000376', 'ndv'='9905', 'num_nulls'='179834', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='2880001504') - """ - - sql """ - alter table store modify column s_market_manager set stats ('row_count'='1002', 'ndv'='732', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Perez', 'data_size'='12823') - """ - - sql """ - alter table store modify column s_number_employees set stats ('row_count'='1002', 'ndv'='101', 'num_nulls'='8', 'min_value'='200', 'max_value'='300', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_end_date set stats ('row_count'='1002', 'ndv'='3', 'num_nulls'='501', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_start_date set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='7', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_suite_number set stats ('row_count'='1002', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='7866') - """ - - sql """ - alter table time_dim modify column t_hour set stats 
('row_count'='86400', 'ndv'='24', 'num_nulls'='0', 'min_value'='0', 'max_value'='23', 'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'num_nulls'='0', 'min_value'='first', 'max_value'='third', 'data_size'='460800') - """ - - sql """ - alter table web_page modify column wp_link_count set stats ('row_count'='3000', 'ndv'='24', 'num_nulls'='27', 'min_value'='2', 'max_value'='25', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_rec_end_date set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='1500', 'min_value'='1999-09-03', 'max_value'='2001-09-02', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='287999764', 'ndv'='1916366', 'num_nulls'='10076902', 'min_value'='1', 'max_value'='1920800', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_item_sk set stats ('row_count'='287999764', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='2879987999', 'ndv'='1916366', 'num_nulls'='129602155', 'min_value'='1', 'max_value'='1920800', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='393180', 'num_nulls'='129595018', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_list_price set stats ('row_count'='2879987999', 'ndv'='19640', 'num_nulls'='129597020', 'min_value'='1.00', 'max_value'='200.00', 'data_size'='11519951996') - 
""" - - sql """ - alter table store_sales modify column ss_net_paid set stats ('row_count'='2879987999', 'ndv'='1288646', 'num_nulls'='129599407', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sold_date_sk set stats ('row_count'='2879987999', 'ndv'='1820', 'num_nulls'='129600843', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='2879987999', 'ndv'='47252', 'num_nulls'='129593012', 'min_value'='28800', 'max_value'='75599', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'data_size'='133') - """ - - sql """ - alter table customer modify column c_birth_year set stats ('row_count'='12000000', 'ndv'='69', 'num_nulls'='419584', 'min_value'='1924', 'max_value'='1992', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_login set stats ('row_count'='12000000', 'ndv'='1', 'num_nulls'='0', 'min_value'='', 'max_value'='', 'data_size'='0') - """ - - sql """ - alter table customer modify column c_salutation set stats ('row_count'='12000000', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='Sir', 'data_size'='37544445') - """ - - sql """ - alter table reason modify column r_reason_desc set stats ('row_count'='65', 'ndv'='64', 'num_nulls'='0', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'data_size'='848') - """ - - sql """ - alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2487978', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_zip set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='19231', 'max_value'='89275', 'data_size'='100') - """ - - sql """ - alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='1439980416', 'ndv'='17005', 'num_nulls'='7199032', 'min_value'='1', 'max_value'='25207', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='1439980416', 'ndv'='1578778', 'num_nulls'='7198116', 'min_value'='0.00', 'max_value'='28730.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_list_price set stats ('row_count'='1439980416', 'ndv'='29396', 'num_nulls'='7201549', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_profit set stats ('row_count'='1439980416', 'ndv'='2058398', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19962.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_order_number set stats ('row_count'='1439980416', 'ndv'='159051824', 'num_nulls'='0', 'min_value'='1', 'max_value'='160000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7201542', 
'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_sk set stats ('row_count'='42', 'ndv'='42', 'num_nulls'='0', 'min_value'='1', 'max_value'='42', 'data_size'='336') - """ - - sql """ - alter table call_center modify column cc_city set stats ('row_count'='42', 'ndv'='17', 'num_nulls'='0', 'min_value'='Antioch', 'max_value'='Spring Hill', 'data_size'='386') - """ - - sql """ - alter table call_center modify column cc_closed_date_sk set stats ('row_count'='42', 'ndv'='0', 'num_nulls'='42', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_gmt_offset set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_hours set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'data_size'='300') - """ - - sql """ - alter table call_center modify column cc_street_number set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='38', 'max_value'='999', 'data_size'='120') - """ - - sql """ - alter table call_center modify column cc_tax_percentage set stats ('row_count'='42', 'ndv'='12', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='168') - """ - - sql """ - alter table inventory modify column inv_date_sk set stats ('row_count'='783000000', 'ndv'='261', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2452635', 'data_size'='6264000000') - """ - - sql """ - alter table inventory modify column inv_item_sk set stats ('row_count'='783000000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_fee set stats ('row_count'='143996756', 'ndv'='9958', 'num_nulls'='2882168', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='575987024') - """ - - sql """ - 
alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='143996756', 'ndv'='100', 'num_nulls'='2878774', 'min_value'='1', 'max_value'='100', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='143996756', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'num_nulls'='0', 'min_value'='0', 'max_value'='9', 'data_size'='28800') - """ - - sql """ - alter table customer_address modify column ca_county set stats ('row_count'='6000000', 'ndv'='1825', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='81254984') - """ - - sql """ - alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='0', 'max_value'='190001', 'data_size'='80') - """ - - sql """ - alter table item modify column i_category_id set stats ('row_count'='300000', 'ndv'='10', 'num_nulls'='766', 'min_value'='1', 'max_value'='10', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_class set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='0', 'min_value'='', 'max_value'='womens watch', 'data_size'='2331199') - """ - - sql """ - alter table item modify column i_container set stats ('row_count'='300000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='2094652') - """ - - sql """ - alter table item modify column i_current_price set stats ('row_count'='300000', 'ndv'='9685', 'num_nulls'='775', 'min_value'='0.09', 'max_value'='99.99', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_manager_id set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='744', 'min_value'='1', 'max_value'='100', 'data_size'='1200000') - """ - - sql """ - alter table item modify column 
i_size set stats ('row_count'='300000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='small', 'data_size'='1296134') - """ - - sql """ - alter table web_returns modify column wr_order_number set stats ('row_count'='71997522', 'ndv'='42383708', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_cash set stats ('row_count'='71997522', 'ndv'='955369', 'num_nulls'='3240493', 'min_value'='0.00', 'max_value'='26992.92', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_country set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='689') - """ - - sql """ - alter table web_site modify column web_gmt_offset set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='1', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_market_manager set stats ('row_count'='54', 'ndv'='46', 'num_nulls'='0', 'min_value'='', 'max_value'='Zachery Oneil', 'data_size'='691') - """ - - sql """ - alter table web_site modify column web_site_sk set stats ('row_count'='54', 'ndv'='54', 'num_nulls'='0', 'min_value'='1', 'max_value'='54', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_street_name set stats ('row_count'='54', 'ndv'='53', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Ridge', 'data_size'='471') - """ - - sql """ - alter table web_site modify column web_tax_percentage set stats ('row_count'='54', 'ndv'='13', 'num_nulls'='1', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='216') - """ - - sql """ - alter table promotion modify column p_channel_tv set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_response_targe set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='27', 'min_value'='1', 'max_value'='1', 'data_size'='6000') - """ - - sql """ - alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='720000376', 'ndv'='6015742', 'num_nulls'='179648', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='720000376', 'ndv'='1091003', 'num_nulls'='180023', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_profit set stats ('row_count'='720000376', 'ndv'='2014057', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19840.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_promo_sk set stats ('row_count'='720000376', 'ndv'='1489', 'num_nulls'='180016', 'min_value'='1', 'max_value'='1500', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='720000376', 'ndv'='12074547', 'num_nulls'='179966', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_division_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_floor_space set stats ('row_count'='1002', 'ndv'='752', 'num_nulls'='6', 'min_value'='5002549', 'max_value'='9997773', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_tax_precentage set stats ('row_count'='1002', 'ndv'='12', 'num_nulls'='8', 'min_value'='0.00', 'max_value'='0.11', 'data_size'='4008') - """ - - sql """ - alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'data_size'='1382400') - """ - - sql """ - alter table time_dim modify column t_time_sk set 
stats ('row_count'='86400', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='691200') - """ - - sql """ - alter table store_returns modify column sr_fee set stats ('row_count'='287999764', 'ndv'='9958', 'num_nulls'='10081860', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reason_sk set stats ('row_count'='287999764', 'ndv'='65', 'num_nulls'='10087936', 'min_value'='1', 'max_value'='65', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_credit set stats ('row_count'='287999764', 'ndv'='698161', 'num_nulls'='10077188', 'min_value'='0.00', 'max_value'='17792.48', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_ticket_number set stats ('row_count'='287999764', 'ndv'='168770768', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_list_price set stats ('row_count'='2879987999', 'ndv'='770971', 'num_nulls'='129593800', 'min_value'='1.00', 'max_value'='20000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='2879987999', 'ndv'='754248', 'num_nulls'='129589177', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_net_profit set stats ('row_count'='2879987999', 'ndv'='1497362', 'num_nulls'='129572933', 'min_value'='-10000.00', 'max_value'='9986.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_promo_sk set stats ('row_count'='2879987999', 'ndv'='1489', 'num_nulls'='129597096', 'min_value'='1', 'max_value'='1500', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'num_nulls'='0', 
'min_value'='AIR', 'max_value'='SURFACE', 'data_size'='87') - """ - - sql """ - alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'data_size'='252') - """ - - sql """ - alter table customer modify column c_current_hdemo_sk set stats ('row_count'='12000000', 'ndv'='7251', 'num_nulls'='418736', 'min_value'='1', 'max_value'='7200', 'data_size'='96000000') - """ - - sql """ - alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2023-07-06', 'max_value'='2023-07-06', 'data_size'='4') - """ - - sql """ - alter table dbgen_version modify column dv_create_time set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2017-05-13 00:00:00', 'max_value'='2017-05-13 00:00:00', 'data_size'='8') - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy deleted file mode 100644 index 82db5a725ce44a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/constraints/query23.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(context.file) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select 
item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py deleted file mode 100644 index 60e9004c90e13b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_rf_prune.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('rf_prune.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf100/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../rf_prune/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py deleted file mode 100644 index 3fde2ac6936367..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. 
-if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf100/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl deleted file mode 100644 index b29318db8e806a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/rf_prune.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - - def ds = """{query}""" - qt_ds_shape_{--} """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl deleted file mode 100644 index c74e55d30c8965..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/ddl/shape.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - def ds = """{query}""" - qt_ds_shape_{--} """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy deleted file mode 100644 index 4b99eafdea9012..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/load.groovy +++ /dev/null @@ -1,4127 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - PARTITION BY RANGE(d_date_sk) - ( - PARTITION `ppast` values less than("2450815"), - PARTITION `p1998` values less than("2451180"), - PARTITION `p1999` 
values less than("2451545"), - PARTITION `p2000` values less than("2451911"), - PARTITION `p2001` values less than("2452276"), - PARTITION `p2002` values less than("2452641"), - PARTITION `p2003` values less than("2453006"), - PARTITION `pfuture` values less than("9999999") - ) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), 
- cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - 
cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = 
"1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - 
wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), 
- p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - 
s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number 
bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - 
DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, - c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - -sql ''' -alter table customer add constraint customer_pk primary key (c_customer_sk); -''' - -sql ''' -alter table customer add constraint customer_uk unique (c_customer_id); -''' - -sql ''' -alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); -''' - -sql ''' -alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); -''' - -sql ''' -alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); -''' - -sql """ -alter table web_sales modify column ws_web_site_sk set stats ('row_count'='72001237', 'ndv'='24', 'min_value'='1', 'max_value'='24', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - 
-sql """ -alter table web_returns modify column wr_item_sk set stats ('row_count'='7197670', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table customer modify column c_birth_country set stats ('row_count'='2000000', 'ndv'='211', 'min_value'='', 'max_value'='ZIMBABWE', 'avg_size'='16787900', 'max_size'='16787900' ) -""" - -sql """ -alter table web_page modify column wp_rec_start_date set stats ('row_count'='2040', 'ndv'='4', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table store_returns modify column sr_store_credit set stats ('row_count'='28795080', 'ndv'='9907', 'min_value'='0.00', 'max_value'='15642.11', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table warehouse modify column w_county set stats ('row_count'='15', 'ndv'='8', 'min_value'='Barrow County', 'max_value'='Ziebach County', 'avg_size'='207', 'max_size'='207' ) -""" - -sql """ -alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'min_value'='F', 'max_value'='M', 'avg_size'='1920800', 'max_size'='1920800' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='7197670', 'ndv'='1868495', 'min_value'='1', 'max_value'='1920800', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_id set stats ('row_count'='15', 'ndv'='15', 'min_value'='AAAAAAAABAAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table item modify column i_size set stats ('row_count'='204000', 'ndv'='8', 'min_value'='', 'max_value'='small', 'avg_size'='880961', 'max_size'='880961' ) -""" - -sql """ -alter table web_sales modify column ws_sales_price set stats ('row_count'='72001237', 'ndv'='302', 'min_value'='0.00', 'max_value'='300.00', 'avg_size'='288004948', 
'max_size'='288004948' ) -""" - -sql """ -alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'min_value'='1', 'max_value'='10436', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store modify column s_country set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='United States', 'avg_size'='5174', 'max_size'='5174' ) -""" - -sql """ -alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='57600', 'max_size'='57600' ) -""" - -sql """ -alter table web_page modify column wp_creation_date_sk set stats ('row_count'='2040', 'ndv'='134', 'min_value'='2450672', 'max_value'='2450815', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='14404374', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_site modify column web_city set stats ('row_count'='24', 'ndv'='11', 'min_value'='Centerville', 'max_value'='Salem', 'avg_size'='232', 'max_size'='232' ) -""" - -sql """ -alter table item modify column i_class_id set stats ('row_count'='204000', 'ndv'='16', 'min_value'='1', 'max_value'='16', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='14404374', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_page modify column wp_customer_sk set stats ('row_count'='2040', 'ndv'='475', 'min_value'='711', 'max_value'='1996257', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'min_value'='D', 'max_value'='W', 'avg_size'='1920800', 'max_size'='1920800' ) -""" 
- -sql """ -alter table call_center modify column cc_suite_number set stats ('row_count'='30', 'ndv'='14', 'min_value'='Suite 0', 'max_value'='Suite W', 'avg_size'='234', 'max_size'='234' ) -""" - -sql """ -alter table web_page modify column wp_url set stats ('row_count'='2040', 'ndv'='2', 'min_value'='', 'max_value'='http://www.foo.com', 'avg_size'='36270', 'max_size'='36270' ) -""" - -sql """ -alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='72001237', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='14404374', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='143997065', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_sales modify column ss_quantity set stats ('row_count'='287997024', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'min_value'='1', 'max_value'='801', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'min_value'='N', 'max_value'='N', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table web_returns modify column wr_reason_sk set stats ('row_count'='7197670', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table promotion modify column p_channel_catalog set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter 
table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='143997065', 'ndv'='38890', 'min_value'='0.00', 'max_value'='45460.80', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_sales modify column cs_order_number set stats ('row_count'='143997065', 'ndv'='16050730', 'min_value'='1', 'max_value'='16000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'min_value'='AM', 'max_value'='PM', 'avg_size'='172800', 'max_size'='172800' ) -""" - -sql """ -alter table promotion modify column p_promo_name set stats ('row_count'='1000', 'ndv'='11', 'min_value'='', 'max_value'='pri', 'avg_size'='3924', 'max_size'='3924' ) -""" - -sql """ -alter table web_site modify column web_manager set stats ('row_count'='24', 'ndv'='19', 'min_value'='Adam Stonge', 'max_value'='Tommy Jones', 'avg_size'='297', 'max_size'='297' ) -""" - -sql """ -alter table store modify column s_gmt_offset set stats ('row_count'='402', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table web_sales modify column ws_quantity set stats ('row_count'='72001237', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store modify column s_number_employees set stats ('row_count'='402', 'ndv'='97', 'min_value'='200', 'max_value'='300', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table call_center modify column cc_mkt_desc set stats ('row_count'='30', 'ndv'='22', 'min_value'='As existing eyebrows miss as the matters. 
Realistic stories may not face almost by a ', 'max_value'='Young tests could buy comfortable, local users o', 'avg_size'='1766', 'max_size'='1766' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='72001237', 'ndv'='36553', 'min_value'='0.00', 'max_value'='43468.92', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table item modify column i_item_sk set stats ('row_count'='204000', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='1632000', 'max_size'='1632000' ) -""" - -sql """ -alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='72001237', 'ndv'='998891', 'min_value'='1', 'max_value'='1000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table customer modify column c_salutation set stats ('row_count'='2000000', 'ndv'='7', 'min_value'='', 'max_value'='Sir', 'avg_size'='6257882', 'max_size'='6257882' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid set stats ('row_count'='72001237', 'ndv'='26912', 'min_value'='0.00', 'max_value'='29810.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'min_value'='0', 'max_value'='86399', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table web_site modify column web_mkt_id set stats ('row_count'='24', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='28795080', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='20400', 'ndv'='20554', 'min_value'='1', 'max_value'='20400', 'avg_size'='163200', 'max_size'='163200' ) -""" - -sql """ -alter table customer_address modify column 
ca_address_id set stats ('row_count'='1000000', 'ndv'='999950', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPOAAA', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'min_value'='1900', 'max_value'='2100', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_net_loss set stats ('row_count'='7197670', 'ndv'='11012', 'min_value'='0.50', 'max_value'='15068.96', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store modify column s_closed_date_sk set stats ('row_count'='402', 'ndv'='69', 'min_value'='2450823', 'max_value'='2451313', 'avg_size'='3216', 'max_size'='3216' ) -""" - -sql """ -alter table customer_address modify column ca_city set stats ('row_count'='1000000', 'ndv'='977', 'min_value'='', 'max_value'='Zion', 'avg_size'='8681993', 'max_size'='8681993' ) -""" - -sql """ -alter table customer modify column c_customer_id set stats ('row_count'='2000000', 'ndv'='1994557', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPAAA', 'avg_size'='32000000', 'max_size'='32000000' ) -""" - -sql """ -alter table web_page modify column wp_access_date_sk set stats ('row_count'='2040', 'ndv'='101', 'min_value'='2452548', 'max_value'='2452648', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table warehouse modify column w_gmt_offset set stats ('row_count'='15', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table warehouse modify column w_street_number set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='957', 'avg_size'='40', 'max_size'='40' ) -""" - -sql """ -alter table store_sales modify column ss_ticket_number set stats ('row_count'='287997024', 'ndv'='23905324', 'min_value'='1', 'max_value'='24000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table 
catalog_returns modify column cr_fee set stats ('row_count'='14404374', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store modify column s_store_name set stats ('row_count'='402', 'ndv'='11', 'min_value'='', 'max_value'='pri', 'avg_size'='1575', 'max_size'='1575' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='143997065', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table reason modify column r_reason_desc set stats ('row_count'='55', 'ndv'='54', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'avg_size'='758', 'max_size'='758' ) -""" - -sql """ -alter table date_dim modify column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'min_value'='2414657', 'max_value'='2487705', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_site modify column web_gmt_offset set stats ('row_count'='24', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'min_value'='afternoon', 'max_value'='night', 'avg_size'='597600', 'max_size'='597600' ) -""" - -sql """ -alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='72001237', 'ndv'='1898561', 'min_value'='1', 'max_value'='2000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_site modify column web_close_date_sk set stats ('row_count'='24', 'ndv'='8', 'min_value'='2443328', 'max_value'='2447131', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table call_center 
modify column cc_market_manager set stats ('row_count'='30', 'ndv'='24', 'min_value'='Charles Corbett', 'max_value'='Tom Root', 'avg_size'='373', 'max_size'='373' ) -""" - -sql """ -alter table store modify column s_market_desc set stats ('row_count'='402', 'ndv'='311', 'min_value'='', 'max_value'='Years get acute years. Right likely players mus', 'avg_size'='23261', 'max_size'='23261' ) -""" - -sql """ -alter table call_center modify column cc_sq_ft set stats ('row_count'='30', 'ndv'='22', 'min_value'='1670015', 'max_value'='31896816', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table customer_address modify column ca_country set stats ('row_count'='1000000', 'ndv'='2', 'min_value'='', 'max_value'='United States', 'avg_size'='12608739', 'max_size'='12608739' ) -""" - -sql """ -alter table promotion modify column p_promo_id set stats ('row_count'='1000', 'ndv'='1004', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPCAAAAA', 'avg_size'='16000', 'max_size'='16000' ) -""" - -sql """ -alter table customer modify column c_preferred_cust_flag set stats ('row_count'='2000000', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='1930222', 'max_size'='1930222' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='20400', 'ndv'='20341', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPDAAAA', 'avg_size'='326400', 'max_size'='326400' ) -""" - -sql """ -alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'min_value'='0', 'max_value'='9', 'avg_size'='28800', 'max_size'='28800' ) -""" - -sql """ -alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='287997024', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_end_date_sk set stats ('row_count'='1000', 'ndv'='571', 'min_value'='2450116', 
'max_value'='2450967', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='143997065', 'ndv'='1835', 'min_value'='2450815', 'max_value'='2452654', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_returns modify column wr_return_quantity set stats ('row_count'='7197670', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store_returns modify column sr_return_amt set stats ('row_count'='28795080', 'ndv'='15493', 'min_value'='0.00', 'max_value'='18973.20', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table web_site modify column web_rec_start_date set stats ('row_count'='24', 'ndv'='4', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store_sales modify column ss_coupon_amt set stats ('row_count'='287997024', 'ndv'='16198', 'min_value'='0.00', 'max_value'='19225.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table call_center modify column cc_company set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table warehouse modify column w_state set stats ('row_count'='15', 'ndv'='8', 'min_value'='AL', 'max_value'='SD', 'avg_size'='30', 'max_size'='30' ) -""" - -sql """ -alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='14404374', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='14404374', 'ndv'='1991754', 'min_value'='1', 'max_value'='2000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table customer_address modify column ca_state set stats ('row_count'='1000000', 
'ndv'='52', 'min_value'='', 'max_value'='WY', 'avg_size'='1939752', 'max_size'='1939752' ) -""" - -sql """ -alter table customer modify column c_customer_sk set stats ('row_count'='2000000', 'ndv'='1994393', 'min_value'='1', 'max_value'='2000000', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table store_sales modify column ss_item_sk set stats ('row_count'='287997024', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='143997065', 'ndv'='1993190', 'min_value'='1', 'max_value'='2000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_cash set stats ('row_count'='7197670', 'ndv'='14621', 'min_value'='0.00', 'max_value'='26466.56', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table customer modify column c_birth_day set stats ('row_count'='2000000', 'ndv'='31', 'min_value'='1', 'max_value'='31', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='160', 'max_size'='160' ) -""" - -sql """ -alter table web_returns modify column wr_fee set stats ('row_count'='7197670', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table item modify column i_class set stats ('row_count'='204000', 'ndv'='100', 'min_value'='', 'max_value'='womens watch', 'avg_size'='1585937', 'max_size'='1585937' ) -""" - -sql """ -alter table customer modify column c_last_review_date_sk set stats ('row_count'='2000000', 'ndv'='366', 'min_value'='2452283', 'max_value'='2452648', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_site modify column web_rec_end_date set stats 
('row_count'='24', 'ndv'='3', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='14404374', 'ndv'='12359', 'min_value'='0.00', 'max_value'='23801.24', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table customer_address modify column ca_location_type set stats ('row_count'='1000000', 'ndv'='4', 'min_value'='', 'max_value'='single family', 'avg_size'='8728128', 'max_size'='8728128' ) -""" - -sql """ -alter table warehouse modify column w_street_type set stats ('row_count'='15', 'ndv'='11', 'min_value'='', 'max_value'='Wy', 'avg_size'='58', 'max_size'='58' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='7197670', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table call_center modify column cc_manager set stats ('row_count'='30', 'ndv'='22', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'avg_size'='368', 'max_size'='368' ) -""" - -sql """ -alter table web_site modify column web_open_date_sk set stats ('row_count'='24', 'ndv'='12', 'min_value'='2450628', 'max_value'='2450807', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'min_value'='3.2.0', 'max_value'='3.2.0', 'avg_size'='5', 'max_size'='5' ) -""" - -sql """ -alter table catalog_sales modify column cs_sales_price set stats ('row_count'='143997065', 'ndv'='302', 'min_value'='0.00', 'max_value'='300.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_number set stats ('row_count'='20400', 'ndv'='109', 'min_value'='1', 'max_value'='109', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table promotion modify column p_channel_press set stats 
('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='985', 'max_size'='985' ) -""" - -sql """ -alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='72001237', 'ndv'='997336', 'min_value'='1', 'max_value'='1000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='14404374', 'ndv'='16271', 'min_value'='0.00', 'max_value'='24544.84', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table call_center modify column cc_mkt_class set stats ('row_count'='30', 'ndv'='25', 'min_value'='A bit narrow forms matter animals. Consist', 'max_value'='Yesterday new men can make moreov', 'avg_size'='1033', 'max_size'='1033' ) -""" - -sql """ -alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='14404374', 'ndv'='2105', 'min_value'='2450821', 'max_value'='2452921', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_page modify column wp_max_ad_count set stats ('row_count'='2040', 'ndv'='5', 'min_value'='0', 'max_value'='4', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table call_center modify column cc_closed_date_sk set stats ('row_count'='30', 'ndv'='0', 'num_nulls'='42', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='7197670', 'ndv'='10429', 'min_value'='0.00', 'max_value'='13602.60', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_name set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='Rooms cook ', 'avg_size'='230', 'max_size'='230' ) -""" - -sql """ -alter table web_page modify column wp_type set stats ('row_count'='2040', 'ndv'='8', 'min_value'='', 'max_value'='welcome', 'avg_size'='12856', 'max_size'='12856' ) -""" - -sql """ -alter table store modify column 
s_division_name set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2779', 'max_size'='2779' ) -""" - -sql """ -alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'min_value'='1', 'max_value'='31', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'min_value'='1', 'max_value'='10436', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_return_tax set stats ('row_count'='7197670', 'ndv'='1820', 'min_value'='0.00', 'max_value'='2551.16', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='143997065', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_street_name set stats ('row_count'='402', 'ndv'='256', 'min_value'='', 'max_value'='Woodland ', 'avg_size'='3384', 'max_size'='3384' ) -""" - -sql """ -alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='287997024', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table web_sales modify column ws_web_page_sk set stats ('row_count'='72001237', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='15', 'ndv'='14', 'min_value'='73065', 'max_value'='977787', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'avg_size'='150', 'max_size'='150' ) -""" - -sql """ -alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 
'ndv'='202', 'min_value'='1900', 'max_value'='2100', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='143997065', 'ndv'='11515', 'min_value'='1', 'max_value'='17108', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='72001237', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table item modify column i_wholesale_cost set stats ('row_count'='204000', 'ndv'='89', 'min_value'='0.02', 'max_value'='88.91', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_return_tax set stats ('row_count'='28795080', 'ndv'='1427', 'min_value'='0.00', 'max_value'='1611.71', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='287997024', 'ndv'='20203', 'min_value'='0.00', 'max_value'='21344.38', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table web_site modify column web_mkt_desc set stats ('row_count'='24', 'ndv'='15', 'min_value'='Acres see else children. 
Mutual too', 'max_value'='Well similar decisions used to keep hardly democratic, personal priorities.', 'avg_size'='1561', 'max_size'='1561' ) -""" - -sql """ -alter table customer modify column c_current_cdemo_sk set stats ('row_count'='2000000', 'ndv'='1221921', 'min_value'='1', 'max_value'='1920798', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='7197670', 'ndv'='1926139', 'min_value'='1', 'max_value'='2000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='287997024', 'ndv'='19105', 'min_value'='0.00', 'max_value'='19878.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table catalog_sales modify column cs_item_sk set stats ('row_count'='143997065', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_store_id set stats ('row_count'='402', 'ndv'='201', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPNAAAAAA', 'avg_size'='6432', 'max_size'='6432' ) -""" - -sql """ -alter table web_site modify column web_mkt_class set stats ('row_count'='24', 'ndv'='18', 'min_value'='About rural reasons shall no', 'max_value'='Wide, final representat', 'avg_size'='758', 'max_size'='758' ) -""" - -sql """ -alter table customer modify column c_birth_month set stats ('row_count'='2000000', 'ndv'='12', 'min_value'='1', 'max_value'='12', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'min_value'='2415020', 'max_value'='2488372', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='72001237', 'ndv'='1899439', 'min_value'='1', 'max_value'='2000000', 
'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_sales modify column ws_item_sk set stats ('row_count'='72001237', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_state set stats ('row_count'='30', 'ndv'='8', 'min_value'='AL', 'max_value'='TN', 'avg_size'='60', 'max_size'='60' ) -""" - -sql """ -alter table promotion modify column p_start_date_sk set stats ('row_count'='1000', 'ndv'='574', 'min_value'='2450100', 'max_value'='2450915', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='143997065', 'ndv'='1933', 'min_value'='2450817', 'max_value'='2452744', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store_sales modify column ss_sales_price set stats ('row_count'='287997024', 'ndv'='202', 'min_value'='0.00', 'max_value'='200.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_channel_details set stats ('row_count'='1000', 'ndv'='992', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'avg_size'='39304', 'max_size'='39304' ) -""" - -sql """ -alter table item modify column i_rec_end_date set stats ('row_count'='204000', 'ndv'='3', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table item modify column i_container set stats ('row_count'='204000', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='1424430', 'max_size'='1424430' ) -""" - -sql """ -alter table web_site modify column web_tax_percentage set stats ('row_count'='24', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.12', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table customer modify column c_email_address set stats ('row_count'='2000000', 'ndv'='1936613', 'min_value'='', 'max_value'='Zulma.Wright@AqokXsju9f2yj.org', 'avg_size'='53014147', 'max_size'='53014147' ) -""" - -sql """ -alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'min_value'='0', 'max_value'='190001', 'avg_size'='80', 'max_size'='80' ) -""" - -sql """ -alter table web_returns modify column wr_account_credit set stats ('row_count'='7197670', 'ndv'='10868', 'min_value'='0.00', 'max_value'='23028.27', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='72001237', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store_sales modify column ss_store_sk set stats ('row_count'='287997024', 'ndv'='200', 'min_value'='1', 'max_value'='400', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table store_returns modify column sr_customer_sk set stats ('row_count'='28795080', 'ndv'='1994323', 'min_value'='1', 'max_value'='2000000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_class set stats ('row_count'='30', 'ndv'='3', 
'min_value'='large', 'max_value'='small', 'avg_size'='166', 'max_size'='166' ) -""" - -sql """ -alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'min_value'='', 'max_value'='lunch', 'avg_size'='248400', 'max_size'='248400' ) -""" - -sql """ -alter table web_site modify column web_street_number set stats ('row_count'='24', 'ndv'='14', 'min_value'='184', 'max_value'='973', 'avg_size'='70', 'max_size'='70' ) -""" - -sql """ -alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='143997065', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer modify column c_last_name set stats ('row_count'='2000000', 'ndv'='4990', 'min_value'='', 'max_value'='Zuniga', 'avg_size'='11833714', 'max_size'='11833714' ) -""" - -sql """ -alter table promotion modify column p_channel_event set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='28795080', 'ndv'='16190', 'min_value'='0.00', 'max_value'='20002.89', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'min_value'='-SCALE 100 -PARALLEL 10 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/doris/tools/tpcds-tools/bin/tpcds-data ', 'max_value'='-SCALE 100 -PARALLEL 10 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/doris/tools/tpcds-tools/bin/tpcds-data ', 'avg_size'='105', 'max_size'='105' ) -""" - -sql """ -alter table warehouse modify column w_street_name set stats ('row_count'='15', 'ndv'='15', 'min_value'='', 'max_value'='Wilson Elm', 'avg_size'='128', 'max_size'='128' ) -""" - -sql """ -alter table call_center modify column cc_county set stats ('row_count'='30', 'ndv'='8', 'min_value'='Barrow County', 'max_value'='Ziebach County', 
'avg_size'='423', 'max_size'='423' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='14404374', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='14404374', 'ndv'='1913762', 'min_value'='1', 'max_value'='1920800', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='72001237', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_mkt_id set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store modify column s_store_sk set stats ('row_count'='402', 'ndv'='398', 'min_value'='1', 'max_value'='402', 'avg_size'='3216', 'max_size'='3216' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='143997065', 'ndv'='29336', 'min_value'='1.00', 'max_value'='29997.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='72001237', 'ndv'='1835731', 'min_value'='1', 'max_value'='1920800', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_returns modify column wr_order_number set stats ('row_count'='7197670', 'ndv'='4249346', 'min_value'='1', 'max_value'='5999999', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_site modify column web_country set stats ('row_count'='24', 'ndv'='1', 
'min_value'='United States', 'max_value'='United States', 'avg_size'='312', 'max_size'='312' ) -""" - -sql """ -alter table web_sales modify column ws_net_profit set stats ('row_count'='72001237', 'ndv'='27958', 'min_value'='-9997.00', 'max_value'='19840.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table store modify column s_company_name set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2793', 'max_size'='2793' ) -""" - -sql """ -alter table web_site modify column web_zip set stats ('row_count'='24', 'ndv'='14', 'min_value'='28828', 'max_value'='78828', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table warehouse modify column w_city set stats ('row_count'='15', 'ndv'='11', 'min_value'='Bethel', 'max_value'='Union', 'avg_size'='111', 'max_size'='111' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='143997065', 'ndv'='28777', 'min_value'='0.00', 'max_value'='31745.52', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_returns modify column sr_return_quantity set stats ('row_count'='28795080', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'avg_size'='1168784', 'max_size'='1168784' ) -""" - -sql """ -alter table store_sales modify column ss_net_profit set stats ('row_count'='287997024', 'ndv'='19581', 'min_value'='-10000.00', 'max_value'='9889.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table call_center modify column 
cc_tax_percentage set stats ('row_count'='30', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.12', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table promotion modify column p_response_targe set stats ('row_count'='1000', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='4000', 'max_size'='4000' ) -""" - -sql """ -alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'min_value'='0', 'max_value'='59', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'min_value'='2415021', 'max_value'='2488070', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table web_returns modify column wr_return_amt set stats ('row_count'='7197670', 'ndv'='19263', 'min_value'='0.00', 'max_value'='28346.31', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_site modify column web_site_sk set stats ('row_count'='24', 'ndv'='24', 'min_value'='1', 'max_value'='24', 'avg_size'='192', 'max_size'='192' ) -""" - -sql """ -alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='14404374', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table warehouse modify column w_suite_number set stats ('row_count'='15', 'ndv'='14', 'min_value'='', 'max_value'='Suite X', 'avg_size'='111', 'max_size'='111' ) -""" - -sql """ -alter table web_page modify column wp_web_page_sk set stats ('row_count'='2040', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='16320', 'max_size'='16320' ) -""" - -sql """ -alter table item modify column i_brand_id set stats ('row_count'='204000', 'ndv'='951', 'min_value'='1001001', 'max_value'='10016017', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_sales modify column ss_customer_sk set stats ('row_count'='287997024', 'ndv'='1994393', 
'min_value'='1', 'max_value'='2000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'min_value'='0', 'max_value'='59', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table item modify column i_item_id set stats ('row_count'='204000', 'ndv'='103230', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'avg_size'='3264000', 'max_size'='3264000' ) -""" - -sql """ -alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'min_value'='N', 'max_value'='N', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table item modify column i_manufact set stats ('row_count'='204000', 'ndv'='1004', 'min_value'='', 'max_value'='pripripri', 'avg_size'='2298787', 'max_size'='2298787' ) -""" - -sql """ -alter table store modify column s_division_id set stats ('row_count'='402', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'min_value'='2023-03-16', 'max_value'='2023-03-16', 'avg_size'='4', 'max_size'='4' ) -""" - -sql """ -alter table web_site modify column web_name set stats ('row_count'='24', 'ndv'='4', 'min_value'='site_0', 'max_value'='site_3', 'avg_size'='144', 'max_size'='144' ) -""" - -sql """ -alter table customer_address modify column ca_suite_number set stats ('row_count'='1000000', 'ndv'='76', 'min_value'='', 'max_value'='Suite Y', 'avg_size'='7652799', 'max_size'='7652799' ) -""" - -sql """ -alter table customer modify column c_first_sales_date_sk set stats ('row_count'='2000000', 'ndv'='3644', 'min_value'='2448998', 'max_value'='2452648', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table web_sales modify column ws_order_number set stats ('row_count'='72001237', 'ndv'='6015811', 'min_value'='1', 
'max_value'='6000000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store modify column s_zip set stats ('row_count'='402', 'ndv'='102', 'min_value'='', 'max_value'='79431', 'avg_size'='1980', 'max_size'='1980' ) -""" - -sql """ -alter table promotion modify column p_item_sk set stats ('row_count'='1000', 'ndv'='970', 'min_value'='280', 'max_value'='203966', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='72001237', 'ndv'='1822804', 'min_value'='1', 'max_value'='1920800', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table web_site modify column web_street_name set stats ('row_count'='24', 'ndv'='24', 'min_value'='11th ', 'max_value'='Wilson Ridge', 'avg_size'='219', 'max_size'='219' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='14404374', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'min_value'='500', 'max_value'='10000', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='7197670', 'ndv'='1923644', 'min_value'='1', 'max_value'='2000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='143997065', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer modify column c_birth_year set stats ('row_count'='2000000', 'ndv'='69', 'min_value'='1924', 'max_value'='1992', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_tax set stats ('row_count'='14404374', 
'ndv'='1926', 'min_value'='0.00', 'max_value'='2390.75', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='72001237', 'ndv'='27115', 'min_value'='0.00', 'max_value'='29810.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='20400', 'ndv'='189', 'min_value'='1', 'max_value'='188', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'min_value'='2415022', 'max_value'='2488070', 'avg_size'='584392', 'max_size'='584392' ) -""" - -sql """ -alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'min_value'='0', 'max_value'='2400', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table inventory modify column inv_item_sk set stats ('row_count'='399330000', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table call_center modify column cc_open_date_sk set stats ('row_count'='30', 'ndv'='15', 'min_value'='2450794', 'max_value'='2451146', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store_sales modify column ss_addr_sk set stats ('row_count'='287997024', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='7197670', 'ndv'='999584', 'min_value'='1', 'max_value'='1000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store modify column s_market_id set stats ('row_count'='402', 'ndv'='10', 'min_value'='1', 'max_value'='10', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_cdemo_sk set stats 
('row_count'='143997065', 'ndv'='1915709', 'min_value'='1', 'max_value'='1920800', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table customer_address modify column ca_address_sk set stats ('row_count'='1000000', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='8000000', 'max_size'='8000000' ) -""" - -sql """ -alter table web_site modify column web_market_manager set stats ('row_count'='24', 'ndv'='21', 'min_value'='Albert Leung', 'max_value'='Zachery Oneil', 'avg_size'='294', 'max_size'='294' ) -""" - -sql """ -alter table item modify column i_rec_start_date set stats ('row_count'='204000', 'ndv'='4', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='72001237', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table call_center modify column cc_street_type set stats ('row_count'='30', 'ndv'='9', 'min_value'='Avenue', 'max_value'='Way', 'avg_size'='140', 'max_size'='140' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='143997065', 'ndv'='37890', 'min_value'='0.00', 'max_value'='43725.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='28795080', 'ndv'='2010', 'min_value'='2450820', 'max_value'='2452822', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table item modify column i_category set stats ('row_count'='204000', 'ndv'='11', 'min_value'='', 'max_value'='Women', 'avg_size'='1201703', 'max_size'='1201703' ) -""" - -sql """ -alter table store modify column s_street_type set stats ('row_count'='402', 'ndv'='21', 'min_value'='', 'max_value'='Wy', 'avg_size'='1657', 'max_size'='1657' ) -""" - -sql """ -alter table web_sales modify 
column ws_ext_list_price set stats ('row_count'='72001237', 'ndv'='29104', 'min_value'='1.02', 'max_value'='29997.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table call_center modify column cc_city set stats ('row_count'='30', 'ndv'='12', 'min_value'='Bethel', 'max_value'='Shady Grove', 'avg_size'='282', 'max_size'='282' ) -""" - -sql """ -alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'min_value'='0-500', 'max_value'='Unknown', 'avg_size'='54000', 'max_size'='54000' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='14404374', 'ndv'='1900770', 'min_value'='1', 'max_value'='1920800', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table item modify column i_manager_id set stats ('row_count'='204000', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table customer_address modify column ca_gmt_offset set stats ('row_count'='1000000', 'ndv'='6', 'min_value'='-10.00', 'max_value'='-5.00', 'avg_size'='4000000', 'max_size'='4000000' ) -""" - -sql """ -alter table store modify column s_state set stats ('row_count'='402', 'ndv'='10', 'min_value'='', 'max_value'='TN', 'avg_size'='800', 'max_size'='800' ) -""" - -sql """ -alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='14404374', 'ndv'='1977657', 'min_value'='1', 'max_value'='2000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table item modify column i_product_name set stats ('row_count'='204000', 'ndv'='200390', 'min_value'='', 'max_value'='pripripripripriought', 'avg_size'='4546148', 'max_size'='4546148' ) -""" - -sql """ -alter table store_returns modify column sr_addr_sk set stats ('row_count'='28795080', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" 
- -sql """ -alter table item modify column i_category_id set stats ('row_count'='204000', 'ndv'='10', 'min_value'='1', 'max_value'='10', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='28795080', 'ndv'='8186', 'min_value'='0.00', 'max_value'='9578.25', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='143997065', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'min_value'='Friday', 'max_value'='Wednesday', 'avg_size'='521779', 'max_size'='521779' ) -""" - -sql """ -alter table web_returns modify column wr_web_page_sk set stats ('row_count'='7197670', 'ndv'='2032', 'min_value'='1', 'max_value'='2040', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store modify column s_street_number set stats ('row_count'='402', 'ndv'='267', 'min_value'='', 'max_value'='986', 'avg_size'='1150', 'max_size'='1150' ) -""" - -sql """ -alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='72001237', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table store_sales modify column ss_ext_tax set stats ('row_count'='287997024', 'ndv'='1722', 'min_value'='0.00', 'max_value'='1762.38', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store_returns modify column sr_refunded_cash set stats ('row_count'='28795080', 'ndv'='12626', 'min_value'='0.00', 'max_value'='17556.95', 'avg_size'='115180320', 'max_size'='115180320' 
) -""" - -sql """ -alter table call_center modify column cc_call_center_sk set stats ('row_count'='30', 'ndv'='30', 'min_value'='1', 'max_value'='30', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table store_returns modify column sr_fee set stats ('row_count'='28795080', 'ndv'='101', 'min_value'='0.50', 'max_value'='100.00', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='14404374', 'ndv'='11144', 'min_value'='0.00', 'max_value'='14130.96', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='143997065', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'avg_size'='1382400', 'max_size'='1382400' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_paid set stats ('row_count'='143997065', 'ndv'='27448', 'min_value'='0.00', 'max_value'='29760.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='143997065', 'ndv'='1993691', 'min_value'='1', 'max_value'='2000000', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table web_sales modify column ws_coupon_amt set stats ('row_count'='72001237', 'ndv'='20659', 'min_value'='0.00', 'max_value'='27591.16', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table promotion modify column p_promo_sk set stats ('row_count'='1000', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_page modify column wp_rec_end_date set stats ('row_count'='2040', 'ndv'='3', 
'min_value'='1999-09-03', 'max_value'='2001-09-02', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='7197670', 'ndv'='999503', 'min_value'='1', 'max_value'='1000000', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_page modify column wp_char_count set stats ('row_count'='2040', 'ndv'='1363', 'min_value'='303', 'max_value'='8523', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table promotion modify column p_purpose set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='6909', 'max_size'='6909' ) -""" - -sql """ -alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='72001237', 'ndv'='1952', 'min_value'='2450817', 'max_value'='2452762', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store_sales modify column ss_net_paid set stats ('row_count'='287997024', 'ndv'='19028', 'min_value'='0.00', 'max_value'='19878.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='7197670', 'ndv'='2185', 'min_value'='2450820', 'max_value'='2453002', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='28795080', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_page modify column cp_description set stats ('row_count'='20400', 'ndv'='20501', 'min_value'='', 'max_value'='Youngsters should get very. Bad, necessary years must pick telecommunications. 
Co', 'avg_size'='1507423', 'max_size'='1507423' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='143997065', 'ndv'='2488', 'min_value'='0.00', 'max_value'='2619.36', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='143997065', 'ndv'='27722', 'min_value'='0.00', 'max_value'='29765.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table warehouse modify column w_zip set stats ('row_count'='15', 'ndv'='15', 'min_value'='28721', 'max_value'='78721', 'avg_size'='75', 'max_size'='75' ) -""" - -sql """ -alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='14404374', 'ndv'='11515', 'min_value'='1', 'max_value'='17108', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_returns modify column cr_order_number set stats ('row_count'='14404374', 'ndv'='9425725', 'min_value'='2', 'max_value'='16000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='143997065', 'ndv'='1916125', 'min_value'='1', 'max_value'='1920800', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='14404374', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='72001237', 'ndv'='10009', 'min_value'='1.00', 'max_value'='10000.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table web_page modify column wp_image_count set stats 
('row_count'='2040', 'ndv'='7', 'min_value'='1', 'max_value'='7', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'min_value'='first', 'max_value'='third', 'avg_size'='460800', 'max_size'='460800' ) -""" - -sql """ -alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='287997024', 'ndv'='16198', 'min_value'='0.00', 'max_value'='19225.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table warehouse modify column w_warehouse_sk set stats ('row_count'='15', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='287997024', 'ndv'='47252', 'min_value'='28800', 'max_value'='75599', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table customer_address modify column ca_street_name set stats ('row_count'='1000000', 'ndv'='8155', 'min_value'='', 'max_value'='Woodland Woodland', 'avg_size'='8445649', 'max_size'='8445649' ) -""" - -sql """ -alter table customer_address modify column ca_county set stats ('row_count'='1000000', 'ndv'='1825', 'min_value'='', 'max_value'='Ziebach County', 'avg_size'='13540273', 'max_size'='13540273' ) -""" - -sql """ -alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'avg_size'='252', 'max_size'='252' ) -""" - -sql """ -alter table customer_address modify column ca_zip set stats ('row_count'='1000000', 'ndv'='7733', 'min_value'='', 'max_value'='99981', 'avg_size'='4848150', 'max_size'='4848150' ) -""" - -sql """ -alter table store modify column s_county set stats ('row_count'='402', 'ndv'='10', 'min_value'='', 'max_value'='Ziebach County', 'avg_size'='5693', 'max_size'='5693' ) -""" - -sql """ -alter table promotion modify column p_channel_tv set stats 
('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='986', 'max_size'='986' ) -""" - -sql """ -alter table time_dim modify column t_time_sk set stats ('row_count'='86400', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='691200', 'max_size'='691200' ) -""" - -sql """ -alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table store_returns modify column sr_return_time_sk set stats ('row_count'='28795080', 'ndv'='32660', 'min_value'='28799', 'max_value'='61199', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='143997065', 'ndv'='14266', 'min_value'='0.00', 'max_value'='14896.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table item modify column i_brand set stats ('row_count'='204000', 'ndv'='714', 'min_value'='', 'max_value'='univunivamalg #9', 'avg_size'='3287671', 'max_size'='3287671' ) -""" - -sql """ -alter table customer modify column c_current_addr_sk set stats ('row_count'='2000000', 'ndv'='866672', 'min_value'='1', 'max_value'='1000000', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table store modify column s_floor_space set stats ('row_count'='402', 'ndv'='300', 'min_value'='5004767', 'max_value'='9997773', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table inventory modify column inv_warehouse_sk set stats ('row_count'='399330000', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table web_site modify column web_county set stats ('row_count'='24', 'ndv'='9', 'min_value'='Barrow County', 'max_value'='Ziebach County', 'avg_size'='331', 'max_size'='331' ) -""" - -sql """ -alter table call_center modify column cc_rec_start_date set stats 
('row_count'='30', 'ndv'='4', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'min_value'='1900Q1', 'max_value'='2100Q1', 'avg_size'='438294', 'max_size'='438294' ) -""" - -sql """ -alter table call_center modify column cc_company_name set stats ('row_count'='30', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='110', 'max_size'='110' ) -""" - -sql """ -alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'min_value'='Good', 'max_value'='Unknown', 'avg_size'='13445600', 'max_size'='13445600' ) -""" - -sql """ -alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='7197670', 'ndv'='19975', 'min_value'='0.00', 'max_value'='29493.38', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table web_site modify column web_company_id set stats ('row_count'='24', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'min_value'='1', 'max_value'='4', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table catalog_sales modify column cs_quantity set stats ('row_count'='143997065', 'ndv'='100', 'min_value'='1', 'max_value'='100', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='72001237', 'ndv'='13977', 'min_value'='0.00', 'max_value'='14927.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table catalog_sales modify column cs_list_price set stats ('row_count'='143997065', 'ndv'='301', 'min_value'='1.00', 'max_value'='300.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table call_center modify column cc_zip set stats 
('row_count'='30', 'ndv'='14', 'min_value'='20059', 'max_value'='75281', 'avg_size'='150', 'max_size'='150' ) -""" - -sql """ -alter table call_center modify column cc_division_name set stats ('row_count'='30', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='123', 'max_size'='123' ) -""" - -sql """ -alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='287997024', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_sales modify column cs_ext_sales_price set stats ('row_count'='143997065', 'ndv'='27598', 'min_value'='0.00', 'max_value'='29808.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='14404374', 'ndv'='21566', 'min_value'='0.00', 'max_value'='29353.87', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'min_value'='10000', 'max_value'='200000', 'avg_size'='80', 'max_size'='80' ) -""" - -sql """ -alter table item modify column i_color set stats ('row_count'='204000', 'ndv'='93', 'min_value'='', 'max_value'='yellow', 'avg_size'='1094247', 'max_size'='1094247' ) -""" - -sql """ -alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='143997065', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store modify column s_tax_precentage set stats ('row_count'='402', 'ndv'='1', 'min_value'='0.00', 'max_value'='0.11', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table item modify column i_units set stats ('row_count'='204000', 'ndv'='22', 'min_value'='', 'max_value'='Unknown', 'avg_size'='852562', 'max_size'='852562' ) -""" - -sql """ -alter table reason modify column r_reason_id set stats ('row_count'='55', 
'ndv'='55', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPCAAAAAA', 'avg_size'='880', 'max_size'='880' ) -""" - -sql """ -alter table store_sales modify column ss_ext_list_price set stats ('row_count'='287997024', 'ndv'='19770', 'min_value'='1.00', 'max_value'='20000.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table promotion modify column p_cost set stats ('row_count'='1000', 'ndv'='1', 'min_value'='1000.00', 'max_value'='1000.00', 'avg_size'='8000', 'max_size'='8000' ) -""" - -sql """ -alter table web_site modify column web_state set stats ('row_count'='24', 'ndv'='9', 'min_value'='AL', 'max_value'='TN', 'avg_size'='48', 'max_size'='48' ) -""" - -sql """ -alter table call_center modify column cc_country set stats ('row_count'='30', 'ndv'='1', 'min_value'='United States', 'max_value'='United States', 'avg_size'='390', 'max_size'='390' ) -""" - -sql """ -alter table store modify column s_company_id set stats ('row_count'='402', 'ndv'='1', 'min_value'='1', 'max_value'='1', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table time_dim modify column t_hour set stats ('row_count'='86400', 'ndv'='24', 'min_value'='0', 'max_value'='23', 'avg_size'='345600', 'max_size'='345600' ) -""" - -sql """ -alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'min_value'='1', 'max_value'='801', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'min_value'='AIR', 'max_value'='SURFACE', 'avg_size'='87', 'max_size'='87' ) -""" - -sql """ -alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='7197670', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='14404374', 'ndv'='30', 'min_value'='1', 'max_value'='30', 
'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='57600', 'max_size'='57600' ) -""" - -sql """ -alter table catalog_returns modify column cr_net_loss set stats ('row_count'='14404374', 'ndv'='11753', 'min_value'='0.50', 'max_value'='15781.83', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table catalog_returns modify column cr_item_sk set stats ('row_count'='14404374', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table store_returns modify column sr_item_sk set stats ('row_count'='28795080', 'ndv'='205012', 'min_value'='1', 'max_value'='204000', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_street_number set stats ('row_count'='30', 'ndv'='15', 'min_value'='406', 'max_value'='984', 'avg_size'='88', 'max_size'='88' ) -""" - -sql """ -alter table promotion modify column p_channel_radio set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table call_center modify column cc_name set stats ('row_count'='30', 'ndv'='15', 'min_value'='California', 'max_value'='Pacific Northwest_1', 'avg_size'='401', 'max_size'='401' ) -""" - -sql """ -alter table call_center modify column cc_rec_end_date set stats ('row_count'='30', 'ndv'='3', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='0', 'max_value'='6', 'avg_size'='7683200', 'max_size'='7683200' ) -""" - -sql """ -alter table inventory modify column inv_date_sk set stats ('row_count'='399330000', 'ndv'='261', 'min_value'='2450815', 
'max_value'='2452635', 'avg_size'='3194640000', 'max_size'='3194640000' ) -""" - -sql """ -alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'min_value'='1', 'max_value'='1920800', 'avg_size'='15366400', 'max_size'='15366400' ) -""" - -sql """ -alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'min_value'='1', 'max_value'='20', 'avg_size'='160', 'max_size'='160' ) -""" - -sql """ -alter table store_sales modify column ss_list_price set stats ('row_count'='287997024', 'ndv'='201', 'min_value'='1.00', 'max_value'='200.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - -sql """ -alter table reason modify column r_reason_sk set stats ('row_count'='55', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='440', 'max_size'='440' ) -""" - -sql """ -alter table web_page modify column wp_autogen_flag set stats ('row_count'='2040', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='2015', 'max_size'='2015' ) -""" - -sql """ -alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='72001237', 'ndv'='1820', 'min_value'='2450816', 'max_value'='2452642', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='14404374', 'ndv'='1000237', 'min_value'='1', 'max_value'='1000000', 'avg_size'='115234992', 'max_size'='115234992' ) -""" - -sql """ -alter table web_site modify column web_street_type set stats ('row_count'='24', 'ndv'='15', 'min_value'='Avenue', 'max_value'='Wy', 'avg_size'='96', 'max_size'='96' ) -""" - -sql """ -alter table store modify column s_rec_end_date set stats ('row_count'='402', 'ndv'='3', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table item modify column i_formulation set stats ('row_count'='204000', 'ndv'='152702', 'min_value'='', 
'max_value'='yellow98911509228741', 'avg_size'='4069400', 'max_size'='4069400' ) -""" - -sql """ -alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'avg_size'='18384800', 'max_size'='18384800' ) -""" - -sql """ -alter table web_page modify column wp_link_count set stats ('row_count'='2040', 'ndv'='24', 'min_value'='2', 'max_value'='25', 'avg_size'='8160', 'max_size'='8160' ) -""" - -sql """ -alter table warehouse modify column w_country set stats ('row_count'='15', 'ndv'='1', 'min_value'='United States', 'max_value'='United States', 'avg_size'='195', 'max_size'='195' ) -""" - -sql """ -alter table catalog_returns modify column cr_store_credit set stats ('row_count'='14404374', 'ndv'='12156', 'min_value'='0.00', 'max_value'='22167.49', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table store modify column s_rec_start_date set stats ('row_count'='402', 'ndv'='4', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'avg_size'='1608', 'max_size'='1608' ) -""" - -sql """ -alter table web_site modify column web_site_id set stats ('row_count'='24', 'ndv'='12', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAOAAAAAAA', 'avg_size'='384', 'max_size'='384' ) -""" - -sql """ -alter table call_center modify column cc_gmt_offset set stats ('row_count'='30', 'ndv'='2', 'min_value'='-6.00', 'max_value'='-5.00', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'avg_size'='320', 'max_size'='320' ) -""" - -sql """ -alter table catalog_returns modify column cr_return_amount set stats ('row_count'='14404374', 'ndv'='20656', 'min_value'='0.00', 'max_value'='28778.31', 'avg_size'='57617496', 'max_size'='57617496' ) -""" - -sql """ -alter table store modify column s_hours set stats 
('row_count'='402', 'ndv'='4', 'min_value'='', 'max_value'='8AM-8AM', 'avg_size'='2848', 'max_size'='2848' ) -""" - -sql """ -alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='7197670', 'ndv'='1865149', 'min_value'='1', 'max_value'='1920800', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='143997065', 'ndv'='15', 'min_value'='1', 'max_value'='15', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table customer modify column c_first_name set stats ('row_count'='2000000', 'ndv'='5140', 'min_value'='', 'max_value'='Zulma', 'avg_size'='11267996', 'max_size'='11267996' ) -""" - -sql """ -alter table catalog_sales modify column cs_net_profit set stats ('row_count'='143997065', 'ndv'='28450', 'min_value'='-10000.00', 'max_value'='19840.00', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table web_site modify column web_suite_number set stats ('row_count'='24', 'ndv'='20', 'min_value'='Suite 130', 'max_value'='Suite U', 'avg_size'='196', 'max_size'='196' ) -""" - -sql """ -alter table web_sales modify column ws_list_price set stats ('row_count'='72001237', 'ndv'='301', 'min_value'='1.00', 'max_value'='300.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table web_returns modify column wr_returned_time_sk set stats ('row_count'='7197670', 'ndv'='87677', 'min_value'='0', 'max_value'='86399', 'avg_size'='57581360', 'max_size'='57581360' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='72001237', 'ndv'='28263', 'min_value'='0.00', 'max_value'='32492.90', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter 
table store_returns modify column sr_net_loss set stats ('row_count'='28795080', 'ndv'='8663', 'min_value'='0.50', 'max_value'='10447.72', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'min_value'='2414930', 'max_value'='2487978', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table store modify column s_suite_number set stats ('row_count'='402', 'ndv'='75', 'min_value'='', 'max_value'='Suite Y', 'avg_size'='3140', 'max_size'='3140' ) -""" - -sql """ -alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='20400', 'ndv'='91', 'min_value'='2450815', 'max_value'='2453005', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table customer_address modify column ca_street_number set stats ('row_count'='1000000', 'ndv'='1002', 'min_value'='', 'max_value'='999', 'avg_size'='2805540', 'max_size'='2805540' ) -""" - -sql """ -alter table item modify column i_current_price set stats ('row_count'='204000', 'ndv'='100', 'min_value'='0.09', 'max_value'='99.99', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table store_returns modify column sr_ticket_number set stats ('row_count'='28795080', 'ndv'='16790866', 'min_value'='1', 'max_value'='23999996', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='143997065', 'ndv'='22020', 'min_value'='0.00', 'max_value'='28422.94', 'avg_size'='575988260', 'max_size'='575988260' ) -""" - -sql """ -alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'min_value'='N', 'max_value'='Y', 'avg_size'='73049', 'max_size'='73049' ) -""" - -sql """ -alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='72001237', 'ndv'='37541', 'min_value'='0.00', 'max_value'='44479.52', 'avg_size'='288004948', 
'max_size'='288004948' ) -""" - -sql """ -alter table web_sales modify column ws_promo_sk set stats ('row_count'='72001237', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='576009896', 'max_size'='576009896' ) -""" - -sql """ -alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='2000000', 'ndv'='3644', 'min_value'='2449028', 'max_value'='2452678', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='20400', 'ndv'='97', 'min_value'='2450844', 'max_value'='2453186', 'avg_size'='81600', 'max_size'='81600' ) -""" - -sql """ -alter table store_sales modify column ss_promo_sk set stats ('row_count'='287997024', 'ndv'='986', 'min_value'='1', 'max_value'='1000', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table catalog_page modify column cp_type set stats ('row_count'='20400', 'ndv'='4', 'min_value'='', 'max_value'='quarterly', 'avg_size'='155039', 'max_size'='155039' ) -""" - -sql """ -alter table promotion modify column p_channel_demo set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='984', 'max_size'='984' ) -""" - -sql """ -alter table store modify column s_market_manager set stats ('row_count'='402', 'ndv'='286', 'min_value'='', 'max_value'='Zane Perez', 'avg_size'='5129', 'max_size'='5129' ) -""" - -sql """ -alter table item modify column i_item_desc set stats ('row_count'='204000', 'ndv'='148398', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'avg_size'='20471814', 'max_size'='20471814' ) -""" - -sql """ -alter table call_center modify column cc_division set stats ('row_count'='30', 'ndv'='6', 'min_value'='1', 'max_value'='6', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table web_site modify column web_class set stats ('row_count'='24', 'ndv'='1', 'min_value'='Unknown', 'max_value'='Unknown', 'avg_size'='168', 'max_size'='168' ) -""" 
- -sql """ -alter table store modify column s_geography_class set stats ('row_count'='402', 'ndv'='2', 'min_value'='', 'max_value'='Unknown', 'avg_size'='2793', 'max_size'='2793' ) -""" - -sql """ -alter table store_returns modify column sr_store_sk set stats ('row_count'='28795080', 'ndv'='200', 'min_value'='1', 'max_value'='400', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table call_center modify column cc_street_name set stats ('row_count'='30', 'ndv'='15', 'min_value'='1st ', 'max_value'='View ', 'avg_size'='240', 'max_size'='240' ) -""" - -sql """ -alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'min_value'='1', 'max_value'='12', 'avg_size'='292196', 'max_size'='292196' ) -""" - -sql """ -alter table customer modify column c_current_hdemo_sk set stats ('row_count'='2000000', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='16000000', 'max_size'='16000000' ) -""" - -sql """ -alter table customer modify column c_login set stats ('row_count'='2000000', 'ndv'='1', 'min_value'='', 'max_value'='', 'avg_size'='0', 'max_size'='0' ) -""" - -sql """ -alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='72001237', 'ndv'='27052', 'min_value'='0.00', 'max_value'='29982.00', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table call_center modify column cc_call_center_id set stats ('row_count'='30', 'ndv'='15', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAOAAAAAAA', 'avg_size'='480', 'max_size'='480' ) -""" - -sql """ -alter table web_returns modify column wr_reversed_charge set stats ('row_count'='7197670', 'ndv'='10979', 'min_value'='0.00', 'max_value'='22972.36', 'avg_size'='28790680', 'max_size'='28790680' ) -""" - -sql """ -alter table store modify column s_city set stats ('row_count'='402', 'ndv'='19', 'min_value'='', 'max_value'='Union', 'avg_size'='3669', 'max_size'='3669' ) -""" - -sql """ -alter table promotion modify column 
p_channel_email set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table catalog_page modify column cp_department set stats ('row_count'='20400', 'ndv'='2', 'min_value'='', 'max_value'='DEPARTMENT', 'avg_size'='201950', 'max_size'='201950' ) -""" - -sql """ -alter table call_center modify column cc_hours set stats ('row_count'='30', 'ndv'='3', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'avg_size'='214', 'max_size'='214' ) -""" - -sql """ -alter table promotion modify column p_channel_dmail set stats ('row_count'='1000', 'ndv'='3', 'min_value'='', 'max_value'='Y', 'avg_size'='987', 'max_size'='987' ) -""" - -sql """ -alter table store modify column s_manager set stats ('row_count'='402', 'ndv'='301', 'min_value'='', 'max_value'='Zachary Price', 'avg_size'='5075', 'max_size'='5075' ) -""" - -sql """ -alter table store_returns modify column sr_reversed_charge set stats ('row_count'='28795080', 'ndv'='9872', 'min_value'='0.00', 'max_value'='16099.52', 'avg_size'='115180320', 'max_size'='115180320' ) -""" - -sql """ -alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='143997065', 'ndv'='30', 'min_value'='1', 'max_value'='30', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'min_value'='-1', 'max_value'='4', 'avg_size'='28800', 'max_size'='28800' ) -""" - -sql """ -alter table web_site modify column web_company_name set stats ('row_count'='24', 'ndv'='6', 'min_value'='able', 'max_value'='pri', 'avg_size'='97', 'max_size'='97' ) -""" - -sql """ -alter table web_page modify column wp_web_page_id set stats ('row_count'='2040', 'ndv'='1019', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'avg_size'='32640', 'max_size'='32640' ) -""" - -sql """ -alter table store_sales modify column ss_sold_date_sk set stats 
('row_count'='287997024', 'ndv'='1820', 'min_value'='2450816', 'max_value'='2452642', 'avg_size'='2303976192', 'max_size'='2303976192' ) -""" - -sql """ -alter table customer_address modify column ca_street_type set stats ('row_count'='1000000', 'ndv'='21', 'min_value'='', 'max_value'='Wy', 'avg_size'='4073296', 'max_size'='4073296' ) -""" - -sql """ -alter table web_sales modify column ws_ext_tax set stats ('row_count'='72001237', 'ndv'='2466', 'min_value'='0.00', 'max_value'='2682.90', 'avg_size'='288004948', 'max_size'='288004948' ) -""" - -sql """ -alter table item modify column i_manufact_id set stats ('row_count'='204000', 'ndv'='1005', 'min_value'='1', 'max_value'='1000', 'avg_size'='816000', 'max_size'='816000' ) -""" - -sql """ -alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='399330000', 'ndv'='1006', 'min_value'='0', 'max_value'='1000', 'avg_size'='1597320000', 'max_size'='1597320000' ) -""" - -sql """ -alter table call_center modify column cc_employees set stats ('row_count'='30', 'ndv'='22', 'min_value'='2935', 'max_value'='69020', 'avg_size'='120', 'max_size'='120' ) -""" - -sql """ -alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'avg_size'='133', 'max_size'='133' ) -""" - -sql """ -alter table store_returns modify column sr_reason_sk set stats ('row_count'='28795080', 'ndv'='55', 'min_value'='1', 'max_value'='55', 'avg_size'='230360640', 'max_size'='230360640' ) -""" - -sql """ -alter table promotion modify column p_discount_active set stats ('row_count'='1000', 'ndv'='2', 'min_value'='', 'max_value'='N', 'avg_size'='981', 'max_size'='981' ) -""" - -sql """ -alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='143997065', 'ndv'='7251', 'min_value'='1', 'max_value'='7200', 'avg_size'='1151976520', 'max_size'='1151976520' ) -""" - -sql """ -alter table store_sales modify column ss_wholesale_cost set stats 
('row_count'='287997024', 'ndv'='100', 'min_value'='1.00', 'max_value'='100.00', 'avg_size'='1151988096', 'max_size'='1151988096' ) -""" - - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim 
modify column d_current_month set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" 
- -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 
'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='Y', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 
'max_value'='N', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2003-01-01', 'max_value'='2003-12-31', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1999-01-01', 'max_value'='1999-12-31', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35793', 'ndv'='35630', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='1997-12-31', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2002-01-01', 'max_value'='2002-12-31', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35065', 'ndv'='35118', 'num_nulls'='0', 'min_value'='2004-01-01', 'max_value'='2100-01-01', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2001-01-01', 'max_value'='2001-12-31', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='1998-12-31', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='366', 'ndv'='366', 'num_nulls'='0', 'min_value'='2000-01-01', 'max_value'='2000-12-31', 'data_size'='1464') partition (p2000);""" - -// 
sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35793', 'ndv'='35428', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='572688') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='368', 'num_nulls'='0', 'min_value'='AAAAAAAAAAGGFCAA', 'max_value'='AAAAAAAAPPFGFCAA', 'data_size'='5840') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAHGFCAA', 'max_value'='AAAAAAAAPPHGFCAA', 'data_size'='5840') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='AAAAAAAAAANGFCAA', 'max_value'='AAAAAAAAPPNGFCAA', 'data_size'='5840') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='AAAAAAAAAAKGFCAA', 'max_value'='AAAAAAAAPPKGFCAA', 'data_size'='5840') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35065', 'ndv'='35542', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAHFCAA', 'max_value'='AAAAAAAAPPPOFCAA', 'data_size'='561040') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAMGFCAA', 'max_value'='AAAAAAAAPPLGFCAA', 'data_size'='5840') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='366', 'ndv'='362', 'num_nulls'='0', 'min_value'='AAAAAAAAAAJGFCAA', 'max_value'='AAAAAAAAPPIGFCAA', 'data_size'='5856') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2450815', 
'max_value'='2451179', 'data_size'='2920') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='2920') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='2920') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='366', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='2928') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35065', 'ndv'='35067', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='280520') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='2920') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35793', 'ndv'='36266', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2450814', 'data_size'='286344') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2453005', 'data_size'='2920') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2614') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='250466') partition (pfuture);""" - -// sql """ 
-// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2608') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='255663') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2607') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2609') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35793', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35065', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 
'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='366', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1999);""" - -// sql 
""" -// alter table date_dim modify column d_dow set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452245', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451514', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35793', 'ndv'='1181', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2450784', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451880', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2452975', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452610', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35065', 'ndv'='1161', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 
'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451149', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter 
table date_dim modify column d_fy_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 
'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column 
d_fy_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim 
modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452275', 'max_value'='2452943', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451910', 'max_value'='2452578', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35793', 'ndv'='1186', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2451117', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452640', 'max_value'='2453308', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451179', 'max_value'='2451847', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35065', 'ndv'='1144', 'num_nulls'='0', 'min_value'='2453005', 'max_value'='2488372', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451544', 'max_value'='2452214', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450814', 
'max_value'='2451482', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1188', 'max_value'='1199', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35793', 'ndv'='1176', 'num_nulls'='0', 'min_value'='0', 'max_value'='1175', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1176', 'max_value'='1187', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1236', 'max_value'='1247', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35065', 'ndv'='1147', 'num_nulls'='0', 'min_value'='1248', 'max_value'='2400', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1212', 'max_value'='1223', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1200', 'max_value'='1211', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1224', 'max_value'='1235', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35793', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_moy set stats 
('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35065', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 
'min_value'='1', 'max_value'='4', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35793', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35065', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2001Q1', 'max_value'='2001Q4', 'data_size'='2190') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35793', 'ndv'='393', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='1997Q4', 'data_size'='214758') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2002Q1', 'max_value'='2002Q4', 'data_size'='2190') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='2000Q1', 'max_value'='2000Q4', 'data_size'='2196') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='2004Q1', 'max_value'='2100Q1', 'data_size'='210390') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_quarter_name 
set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2003Q1', 'max_value'='2003Q4', 'data_size'='2190') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998Q1', 'max_value'='1998Q4', 'data_size'='2190') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1999Q1', 'max_value'='1999Q4', 'data_size'='2190') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition 
(p2003);""" - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35793', 'ndv'='35806', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2450722', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451088', 'max_value'='2451452', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451819', 'max_value'='2452183', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='361', 'num_nulls'='0', 'min_value'='2450723', 'max_value'='2451087', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452184', 'max_value'='2452548', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2452549', 'max_value'='2452913', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35065', 'ndv'='34991', 'num_nulls'='0', 'min_value'='2452914', 'max_value'='2487978', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='366', 'ndv'='365', 'num_nulls'='0', 'min_value'='2451453', 'max_value'='2451818', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim 
modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35793', 'ndv'='35878', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2450449', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='366', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2450450', 'max_value'='2450814', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35065', 'ndv'='35076', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2487705', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 
'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001);""" - -// sql """ -// alter table date_dim modify column 
d_weekend set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002);""" - -// sql """ -// alter table date_dim modify column 
d_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003);""" - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture);""" - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ 
- -// sql """ -// alter table date_dim modify column d_current_day set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_month set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 
'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_quarter set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter 
table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_week set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='Y', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 
'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35793', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_current_year set stats ('row_count'='35065', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2003-01-01', 'max_value'='2003-12-31', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1999-01-01', 'max_value'='1999-12-31', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35793', 'ndv'='35630', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='1997-12-31', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2002-01-01', 'max_value'='2002-12-31', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='35065', 'ndv'='35118', 'num_nulls'='0', 'min_value'='2004-01-01', 'max_value'='2100-01-01', 'data_size'='140260') partition (pfuture); -// """ - -// sql 
""" -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2001-01-01', 'max_value'='2001-12-31', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='1998-12-31', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date set stats ('row_count'='366', 'ndv'='366', 'num_nulls'='0', 'min_value'='2000-01-01', 'max_value'='2000-12-31', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35793', 'ndv'='35428', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='572688') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='368', 'num_nulls'='0', 'min_value'='AAAAAAAAAAGGFCAA', 'max_value'='AAAAAAAAPPFGFCAA', 'data_size'='5840') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAHGFCAA', 'max_value'='AAAAAAAAPPHGFCAA', 'data_size'='5840') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='AAAAAAAAAANGFCAA', 'max_value'='AAAAAAAAPPNGFCAA', 'data_size'='5840') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='AAAAAAAAAAKGFCAA', 'max_value'='AAAAAAAAPPKGFCAA', 'data_size'='5840') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='35065', 'ndv'='35542', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAHFCAA', 
'max_value'='AAAAAAAAPPPOFCAA', 'data_size'='561040') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='AAAAAAAAAAMGFCAA', 'max_value'='AAAAAAAAPPLGFCAA', 'data_size'='5840') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date_id set stats ('row_count'='366', 'ndv'='362', 'num_nulls'='0', 'min_value'='AAAAAAAAAAJGFCAA', 'max_value'='AAAAAAAAPPIGFCAA', 'data_size'='5856') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='2920') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='366', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 'data_size'='2920') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='2920') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='366', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='2928') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35065', 'ndv'='35067', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='280520') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='2920') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='35793', 'ndv'='36266', 'num_nulls'='0', 
'min_value'='2415022', 'max_value'='2450814', 'data_size'='286344') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_date_sk set stats ('row_count'='365', 'ndv'='365', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2453005', 'data_size'='2920') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2614') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='250466') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2608') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='255663') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2607') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='2606') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_day_name set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 
'max_value'='Wednesday', 'data_size'='2609') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35793', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='35065', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='366', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_dom set stats ('row_count'='365', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 
'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35793', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='365', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='366', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_dow set stats ('row_count'='35065', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452245', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451514', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35793', 'ndv'='1181', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2450784', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column 
d_first_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451880', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2452975', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452610', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='35065', 'ndv'='1161', 'num_nulls'='0', 'min_value'='2453006', 'max_value'='2488070', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_first_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451149', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 
'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_following_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35793', 'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') 
partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table 
date_dim modify column d_fy_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_fy_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 
'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_holiday set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452275', 'max_value'='2452943', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451910', 'max_value'='2452578', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats 
('row_count'='35793', 'ndv'='1186', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2451117', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2452640', 'max_value'='2453308', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451179', 'max_value'='2451847', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='35065', 'ndv'='1144', 'num_nulls'='0', 'min_value'='2453005', 'max_value'='2488372', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='2451544', 'max_value'='2452214', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_last_dom set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='2450814', 'max_value'='2451482', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1188', 'max_value'='1199', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35793', 'ndv'='1176', 'num_nulls'='0', 'min_value'='0', 'max_value'='1175', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1176', 'max_value'='1187', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 
'min_value'='1236', 'max_value'='1247', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='35065', 'ndv'='1147', 'num_nulls'='0', 'min_value'='1248', 'max_value'='2400', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1212', 'max_value'='1223', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1200', 'max_value'='1211', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_month_seq set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1224', 'max_value'='1235', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35793', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='35065', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='366', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify 
column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_moy set stats ('row_count'='365', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_qoy set stats ('row_count'='35793', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column 
d_qoy set stats ('row_count'='35065', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2001Q1', 'max_value'='2001Q4', 'data_size'='2190') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35793', 'ndv'='393', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='1997Q4', 'data_size'='214758') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2002Q1', 'max_value'='2002Q4', 'data_size'='2190') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='366', 'ndv'='4', 'num_nulls'='0', 'min_value'='2000Q1', 'max_value'='2000Q4', 'data_size'='2196') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='2004Q1', 'max_value'='2100Q1', 'data_size'='210390') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='2003Q1', 'max_value'='2003Q4', 'data_size'='2190') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998Q1', 'max_value'='1998Q4', 'data_size'='2190') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_name set stats ('row_count'='365', 'ndv'='4', 'num_nulls'='0', 'min_value'='1999Q1', 'max_value'='1999Q4', 'data_size'='2190') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35793', 
'ndv'='394', 'num_nulls'='0', 'min_value'='1', 'max_value'='393', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='405', 'max_value'='409', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='366', 'ndv'='5', 'num_nulls'='0', 'min_value'='401', 'max_value'='405', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='397', 'max_value'='401', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='409', 'max_value'='413', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='393', 'max_value'='397', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='365', 'ndv'='5', 'num_nulls'='0', 'min_value'='413', 'max_value'='417', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_quarter_seq set stats ('row_count'='35065', 'ndv'='387', 'num_nulls'='0', 'min_value'='417', 'max_value'='801', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35793', 'ndv'='35806', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2450722', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451088', 'max_value'='2451452', 
'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2451819', 'max_value'='2452183', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='361', 'num_nulls'='0', 'min_value'='2450723', 'max_value'='2451087', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452184', 'max_value'='2452548', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='365', 'ndv'='362', 'num_nulls'='0', 'min_value'='2452549', 'max_value'='2452913', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='35065', 'ndv'='34991', 'num_nulls'='0', 'min_value'='2452914', 'max_value'='2487978', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_lq set stats ('row_count'='366', 'ndv'='365', 'num_nulls'='0', 'min_value'='2451453', 'max_value'='2451818', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2452276', 'max_value'='2452640', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35793', 'ndv'='35878', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2450449', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='366', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451180', 'max_value'='2451544', 
'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2451545', 'max_value'='2451910', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='364', 'num_nulls'='0', 'min_value'='2450450', 'max_value'='2450814', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='35065', 'ndv'='35076', 'num_nulls'='0', 'min_value'='2452641', 'max_value'='2487705', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='363', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2451179', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_same_day_ly set stats ('row_count'='365', 'ndv'='367', 'num_nulls'='0', 'min_value'='2451911', 'max_value'='2452275', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5270', 'max_value'='5322', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5114', 'max_value'='5166', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5166', 'max_value'='5218', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5375', 'max_value'='5427', 'data_size'='1460') partition (p2003); -// """ - 
-// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='365', 'ndv'='53', 'num_nulls'='0', 'min_value'='5323', 'max_value'='5375', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35065', 'ndv'='5008', 'num_nulls'='0', 'min_value'='5427', 'max_value'='10436', 'data_size'='140260') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='35793', 'ndv'='5136', 'num_nulls'='0', 'min_value'='1', 'max_value'='5114', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_week_seq set stats ('row_count'='366', 'ndv'='53', 'num_nulls'='0', 'min_value'='5218', 'max_value'='5270', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='366', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='366') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35793', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='35793') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='35065', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 
'max_value'='Y', 'data_size'='35065') partition (pfuture); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_weekend set stats ('row_count'='365', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='365') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1999', 'max_value'='1999', 'data_size'='1460') partition (p1999); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='35793', 'ndv'='98', 'num_nulls'='0', 'min_value'='1900', 'max_value'='1997', 'data_size'='143172') partition (ppast); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='1998', 'max_value'='1998', 'data_size'='1460') partition (p1998); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2001', 'max_value'='2001', 'data_size'='1460') partition (p2001); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2002', 'max_value'='2002', 'data_size'='1460') partition (p2002); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='366', 'ndv'='1', 'num_nulls'='0', 'min_value'='2000', 'max_value'='2000', 'data_size'='1464') partition (p2000); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats ('row_count'='365', 'ndv'='1', 'num_nulls'='0', 'min_value'='2003', 'max_value'='2003', 'data_size'='1460') partition (p2003); -// """ - -// sql """ -// alter table date_dim modify column d_year set stats 
('row_count'='35065', 'ndv'='97', 'num_nulls'='0', 'min_value'='2004', 'max_value'='2100', 'data_size'='140260') partition (pfuture); -// """ - - - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy deleted file mode 100644 index ac9bfd0d8cb6b4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - qt_ds_shape_1 ''' - explain shape plan -with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy deleted file mode 100644 index 903d035d22e918..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query10.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_10 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where 
c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy deleted file mode 100644 index e3712ab6cee089..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query11.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_11 ''' - explain shape plan - - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = 
ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy deleted file mode 100644 index 04255c01f57aa1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query12.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_12 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy deleted file mode 100644 index f1d8338c174e11..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query13.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_13 ''' - explain shape plan - - -select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 
150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy deleted file mode 100644 index d9f561d7fd313f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query14.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_14 ''' - explain shape plan - - -with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from 
catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy deleted file mode 100644 index 7e5f7c5ff17bd2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_15 ''' - explain shape plan - - - -select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy deleted file mode 100644 index 81886a513f9a0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query16.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_16 ''' - explain shape plan - - - - -select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from 
catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy deleted file mode 100644 index 7f53dfaa3a0c16..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query17.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_17 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name 
in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy deleted file mode 100644 index fec13a57cf6313..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query18.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_18 ''' - explain shape plan - - - - -select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy deleted file mode 100644 index beb66b47ff57f5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query19.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_19 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy deleted file mode 100644 index 2fb61ba9fbd3da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query2.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_2 ''' - explain shape plan - - - - -with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) 
wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy deleted file mode 100644 index aead630449670f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query20.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_20 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy deleted file mode 100644 index 8d7a39c97354b0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query21.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_21 ''' - explain shape plan - - - - -select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy deleted file mode 100644 index c31efd8b73852d..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query22.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_22 ''' - explain shape plan - - - - -select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - 
,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy deleted file mode 100644 index 91494ee79761b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query23.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_23 ''' - explain shape plan - - - -with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and 
cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy deleted file mode 100644 index bfbd00add04fb4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query24.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_24 ''' - explain shape plan - - - - with ssales as - (select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid - from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address - where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip - and s_market_id=8 - group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) - select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid - from ssales - where i_color = 'beige' - group by c_last_name - ,c_first_name - ,s_store_name - having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) - order by c_last_name - ,c_first_name - ,s_store_name - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy 
deleted file mode 100644 index 0bb08f70c81758..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query25.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_25 ''' - explain shape plan - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - 
store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy deleted file mode 100644 index 01f80b4596d062..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query26.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_26 ''' - explain shape plan - - - - -select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy deleted file mode 100644 index 407f2ab9ea9af7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query27.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_27 ''' - explain shape plan - - - -select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100; - - 
''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy deleted file mode 100644 index aacdcc03a4a946..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query28.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_28 ''' - explain shape plan - - - - -select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or 
ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy deleted file mode 100644 index 98f4c2ffd57a4d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query29.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_29 ''' - explain shape plan - - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy deleted file mode 100644 index 
d91dc9bdbc63f3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_3 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk 
- and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy deleted file mode 100644 index f3c4f793cb206e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query30.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_30 ''' - explain shape plan - - - - -with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy deleted file mode 100644 index 35a6d672ddc2dd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query31.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_31 ''' - explain shape plan - - - - -with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and 
ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy deleted file mode 100644 index 0e712c8804d29c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query32.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_32 ''' - explain shape plan - - - - -select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy deleted file mode 100644 index 61fe0c580384d0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query33.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_33 ''' - explain shape plan - - - - -with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - 
item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy deleted file mode 100644 index 7b0a9cb22fffbc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query34.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_34 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where 
ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy deleted file mode 100644 index 715fb701b86a60..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query35.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_35 ''' - explain shape plan - - - - -select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy deleted file mode 100644 index b67ecc699fec84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query36.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_36 ''' - explain shape plan - - - - -select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy deleted file mode 100644 index ee3b6b8ebf2224..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query37.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_37 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by 
i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy deleted file mode 100644 index 2e1a2d1a5a12fa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query38.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_38 ''' - explain shape plan - - - - -select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy deleted file mode 100644 index f041f52ac11295..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query39.groovy +++ /dev/null 
@@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_39 ''' - explain shape plan - - - - -with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and 
inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy deleted file mode 100644 index 1af702e366764a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query4.groovy +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_4 ''' - explain shape plan - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and 
cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy deleted file mode 100644 index b943f2471b568b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query40.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_40 ''' - explain shape plan - - - -select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy deleted file mode 100644 index c14fdbb8a94abb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query41.groovy +++ /dev/null @@ -1,98 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_41 ''' - explain shape plan - - - - -select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 
'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy deleted file mode 100644 index e533209b57668f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query42.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_42 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy deleted file mode 100644 index cc53ae91ec0368..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query43.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_43 ''' - explain shape plan - - - - -select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy deleted file mode 100644 index cc655c04360072..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query44.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_44 ''' - explain shape plan - - - - -select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where 
ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy deleted file mode 100644 index f60e79ad7c85ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query45.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_45 ''' - explain shape plan - - - - -select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy deleted file mode 100644 index 8a0afcea593227..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query46.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_46 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - 
household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy deleted file mode 100644 index 4845dfd8a32c6f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query47.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_47 ''' - explain shape plan - - -with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - 
v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy deleted file mode 100644 index 9f6f638c5fd373..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query48.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_48 ''' - explain shape plan - - - - -select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy deleted file mode 100644 index b13177064ac384..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query49.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_49 ''' - explain shape plan - - - - -select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - 
,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - 
) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy deleted file mode 100644 index 222b4ae79923b6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query5.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_5 ''' - explain shape plan - - - - -with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as 
decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', 
web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy deleted file mode 100644 index e97fd67f20d883..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query50.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_50 ''' - explain shape plan - - - - -select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - 
,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy deleted file mode 100644 index e80a42a2a9e8c4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query51.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_51 ''' - explain shape plan - - - - -WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else 
store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy deleted file mode 100644 index b827f1c2c0dc39..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query52.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_52 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy deleted file mode 100644 index fb8fe661c8b401..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query53.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_53 ''' - explain shape plan - - - - -select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu 
packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy deleted file mode 100644 index a919601deab68b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query54.groovy +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_54 ''' - explain shape plan - - - - -with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from 
my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy deleted file mode 100644 index a1760bd16d2a23..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query55.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_55 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy deleted file mode 100644 index 6e611882852e61..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query56.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_56 ''' - explain shape plan - - - - -with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy deleted file mode 100644 index 5be0c3041c2082..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query57.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_57 ''' - explain shape plan - - - -with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy deleted file mode 100644 index a34ff64eee162c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query58.groovy +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_58 ''' - explain shape plan - - - - -with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - 
,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy deleted file mode 100644 index be96d9b5ffe71c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query59.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_59 ''' - explain shape plan - - - -with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales 
sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy deleted file mode 100644 index bbd815ee463cb0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query6.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - // TODO: uncomment following line to get better shape - // sql 'set max_join_number_bushy_tree=6' - - qt_ds_shape_6 ''' - explain shape plan - - - - -select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy deleted file mode 100644 index 6c60d4a4b7ca22..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query60.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_60 ''' - explain shape plan - - - - -with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, 
- date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy deleted file mode 100644 index 11dc3db78d1f35..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query61.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_61 ''' - explain shape plan - - - - -select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - 
where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy deleted file mode 100644 index 6eac332278c1b4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query62.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_62 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy deleted file mode 100644 index 600981b9cebedc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query63.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_63 ''' - explain shape plan - - - - -select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy deleted file mode 100644 index 9c06fec135a9a9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query64.groovy +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds64 = ''' - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), - cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE 
ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 - group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year - ) - select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt - from cross_sales cs1,cross_sales cs2 - where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip - order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1; - - ''' - - qt_ds_shape_64 'explain shape plan ' + ds64 - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy deleted file mode 100644 index bec1515c2ee85e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query65.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_65 ''' - explain shape plan - - - - -select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy deleted file mode 100644 index 83804dfb545594..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query66.groovy +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_66 ''' - explain shape plan - - - - -select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - 
,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy 
= 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - 
,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when 
d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy deleted file mode 100644 index e6a5a63192f5a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query67.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_67 ''' - explain shape plan - - - - -select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - 
,sumsales - ,rk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy deleted file mode 100644 index e9dd6bb74416c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query68.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_68 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100; - - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy deleted file mode 100644 index b80eec4fc048d6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query69.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_69 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy deleted file mode 100644 index 547f1c0ac3afeb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query7.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_7 ''' - explain shape plan - - - - -select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy deleted file mode 100644 index ec131a31f78102..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query70.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_70 ''' - explain shape plan - - - - -select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) 
as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy deleted file mode 100644 index 34836552508d38..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query71.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_71 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy deleted file mode 100644 index 30d399176fa3a4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query72.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_72 ''' - explain shape plan - - - - -select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and d3.d_date > d1.d_date + 5 - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy deleted file mode 100644 index 906dce3697abad..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query73.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_73 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy deleted file mode 100644 index cd7c8028b6d47b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query74.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_74 ''' - explain shape plan - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = 
t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy deleted file mode 100644 index 1845cf2c2acd21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query75.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_75 ''' - explain shape plan - - - - -WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - 
COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy deleted file mode 100644 index cf5fa8f8c232e5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query76.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_76 ''' - explain shape plan - - - - -select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - 
WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy deleted file mode 100644 index fb196585a8b692..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query77.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_77 ''' - explain shape plan - - - - -with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between 
cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy deleted file mode 100644 index 70587543e584fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query78.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_78 ''' - explain shape plan - - - - -with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs 
as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy deleted file mode 100644 index c5eeb99d6d7603..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query79.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_79 ''' - explain shape plan - - - - -select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy deleted file mode 100644 index b01eab837b63ac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query8.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_8 ''' - explain shape plan - - - - -select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - 
'31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - 
'58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy deleted file mode 100644 index c3973871b98c1d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query80.groovy +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_80 ''' - explain shape plan - - - - -with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between 
cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy deleted file mode 100644 index b86d7bb4dc575d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query81.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_81 ''' - explain shape plan - - - - -with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - 
,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy deleted file mode 100644 index c6ea139fcb164b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query82.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_82 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy deleted file mode 100644 index c363bd4037969c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query83.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_83 ''' - explain shape plan - - - - -with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where 
d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy deleted file mode 100644 index a030a4c1ae132f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query84.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_84 ''' - explain shape plan - - - - -select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy deleted file mode 
100644 index c8931084b4a0e1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query85.groovy +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_85 ''' - explain shape plan - - - - -select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, 
reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy deleted file mode 100644 index 669e3fae1d357d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query86.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_86 ''' - explain shape plan - - - - -select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and 
d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy deleted file mode 100644 index baf43511edeed0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query87.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_87 ''' - explain shape plan - - - - -select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy deleted file mode 100644 index cc1dc227786134..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query88.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_88 ''' - explain shape plan - - - - -select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - 
(household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and 
household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , 
time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy deleted file mode 100644 index 4ca9b8f69f923d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query89.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_89 ''' - explain shape plan - - - - -select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy deleted file mode 100644 index 72321f8a522999..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query9.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - qt_ds_shape_9 ''' - explain shape plan - - - - -select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select 
avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy deleted file mode 100644 index 25e457398299db..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query90.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_90 ''' - explain shape plan - - - - -select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and 
ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy deleted file mode 100644 index 655387b20a885c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query91.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_91 ''' - explain shape plan - - - - -select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy deleted file mode 100644 index 1e6bc33766f29f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query92.groovy +++ 
/dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_92 ''' - explain shape plan - - - - -select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM 
- web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy deleted file mode 100644 index 067576f5ac8318..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query93.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_93 ''' - explain shape plan - - - - -select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy deleted file mode 100644 index 5832e550cdcdc8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query94.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_94 ''' - explain shape plan - - - - -select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> 
ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy deleted file mode 100644 index 10a0394d19d56b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query95.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_95 ''' - explain shape plan - - -with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy deleted file mode 100644 index ee419513883b8b..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query96.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_96 ''' - explain shape plan - - - - -select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy deleted file mode 100644 index 253ea22b361497..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query97.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_97 ''' - explain shape plan - - - - -with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy deleted file mode 100644 index f48270d50ad8d6..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query98.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_98 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where 
- ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy deleted file mode 100644 index a2bb765ae6bd3b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/noStatsRfPrune/query99.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=true' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_99 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy deleted file mode 100644 index 6fe68f53233903..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - qt_ds_shape_1 ''' - explain shape plan -with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy deleted file mode 100644 index fc895a2048160c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query10.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_10 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where 
c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy deleted file mode 100644 index e63b9d8f685006..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query11.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_11 ''' - explain shape plan - - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = 
ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy deleted file mode 100644 index 47ac46798f9264..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query12.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_12 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - 
,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy deleted file mode 100644 index 9aa9d301878f38..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query13.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_13 ''' - explain shape plan - - -select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 
150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy deleted file mode 100644 index a491062d05363e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query14.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_14 ''' - explain shape plan - - -with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from 
catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy deleted file mode 100644 index c42ff735202802..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_15 ''' - explain shape plan - - - -select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy deleted file mode 100644 index 6516e0d007dc92..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query16.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_16 ''' - explain shape plan - - - - -select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from 
catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy deleted file mode 100644 index 367fee559d579b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query17.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_17 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name 
in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy deleted file mode 100644 index beb7e30d525844..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query18.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_18 ''' - explain shape plan - - - - -select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy deleted file mode 100644 index b845f9d6a08212..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query19.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_19 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy deleted file mode 100644 index 89c63655fc5c3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query2.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_2 ''' - explain shape plan - - - - -with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null 
end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy deleted file mode 100644 index 2262f12f7e2a77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query20.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_20 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id 
- ,i_item_desc - ,revenueratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy deleted file mode 100644 index 4d9d0f7b8f5639..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query21.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_21 ''' - explain shape plan - - - - -select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy deleted file mode 100644 index 0feaf26850b36f..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query22.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_22 ''' - explain shape plan - - - - -select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - 
,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy deleted file mode 100644 index 3d8aee1907c112..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query23.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_23 ''' - explain shape plan - - - -with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and 
cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy deleted file mode 100644 index 8ce1a2c29f8b7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query24.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_24 ''' - explain shape plan - - - - with ssales as - (select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid - from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address - where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip - and s_market_id=8 - group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) - select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid - from ssales - where i_color = 'beige' - group by c_last_name - ,c_first_name - ,s_store_name - having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) - order by c_last_name - ,c_first_name - ,s_store_name - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy 
deleted file mode 100644 index ca4fea0419c96e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query25.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_25 ''' - explain shape plan - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - 
store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy deleted file mode 100644 index a5e0a760a24706..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query26.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_26 ''' - explain shape plan - - - - -select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy deleted file mode 100644 index e0888d00f3785a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query27.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_27 ''' - explain shape plan - - - -select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100; - - 
''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy deleted file mode 100644 index c2cd90ef2d1dc6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query28.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_28 ''' - explain shape plan - - - - -select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or 
ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy deleted file mode 100644 index 238f0e434d9772..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query29.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_29 ''' - explain shape plan - - - - -select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy deleted file mode 100644 index 
de3b2bbbece903..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query3.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_ds_shape_3 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = 
item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy deleted file mode 100644 index a39c25ea9bd016..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query30.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_30 ''' - explain shape plan - - - - -with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy deleted file mode 100644 index 12fb0f766c7385..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query31.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_31 ''' - explain shape plan - - - - -with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and 
ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy deleted file mode 100644 index 1132ffd6bea511..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query32.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_32 ''' - explain shape plan - - - - -select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy deleted file mode 100644 index 78587586a51685..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query33.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_33 ''' - explain shape plan - - - - -with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from 
- item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy deleted file mode 100644 index 379b29ccdf7c59..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query34.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_34 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where 
ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy deleted file mode 100644 index 755ea40079011e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query35.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_35 ''' - explain shape plan - - - - -select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy deleted file mode 100644 index def05c702aba48..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query36.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_36 ''' - explain shape plan - - - - -select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy deleted file mode 100644 index 82ce00de92bd42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query37.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_37 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by 
i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy deleted file mode 100644 index d2175cb65cc79a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query38.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_38 ''' - explain shape plan - - - - -select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy deleted file mode 100644 index 9dfcaf127d58d0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query39.groovy +++ /dev/null 
@@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_39 ''' - explain shape plan - - - - -with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and 
inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy deleted file mode 100644 index 0470f27267ebc9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query4.groovy +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_4 ''' - explain shape plan - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and 
cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy deleted file mode 100644 index 55fdce027dc4a7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query40.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_40 ''' - explain shape plan - - - -select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy deleted file mode 100644 index 3604cfa49f66ac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query41.groovy +++ /dev/null @@ -1,98 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_41 ''' - explain shape plan - - - - -select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 
'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy deleted file mode 100644 index 1b2aad016d79eb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query42.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_42 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy deleted file mode 100644 index f75ac2dae53e27..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query43.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_43 ''' - explain shape plan - - - - -select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy deleted file mode 100644 index bccd8304610749..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query44.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_44 ''' - explain shape plan - - - - -select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where 
ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy deleted file mode 100644 index 54bdeaa5cd2e5e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query45.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_45 ''' - explain shape plan - - - - -select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy deleted file mode 100644 index 05edaa7a4dcb79..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query46.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_46 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - 
household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy deleted file mode 100644 index 6bf7243f0e2cea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query47.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_47 ''' - explain shape plan - - -with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - 
v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy deleted file mode 100644 index 0ccf5809bc6d42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query48.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_48 ''' - explain shape plan - - - - -select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy deleted file mode 100644 index 4a9ab3f4a2ba4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query49.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_49 ''' - explain shape plan - - - - -select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - 
,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - 
) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy deleted file mode 100644 index 7f2e92f5fe7d8f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query5.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_5 ''' - explain shape plan - - - - -with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as 
decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', 
web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy deleted file mode 100644 index 1cc8aa811d2dc0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query50.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_50 ''' - explain shape plan - - - - -select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - 
,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy deleted file mode 100644 index 3fb187cdc3ec0e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query51.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_51 ''' - explain shape plan - - - - -WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else 
store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy deleted file mode 100644 index 00f0d1cdce76b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query52.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_52 ''' - explain shape plan - - - - -select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy deleted file mode 100644 index 0bbb9f8a7aa7aa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query53.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_53 ''' - explain shape plan - - - - -select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu 
packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy deleted file mode 100644 index cf2a0806fb7a76..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query54.groovy +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_54 ''' - explain shape plan - - - - -with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from 
my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy deleted file mode 100644 index 440cc0a53a375b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query55.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_55 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy deleted file mode 100644 index bd8405821ee155..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query56.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_56 ''' - explain shape plan - - - - -with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy deleted file mode 100644 index 4e7d37d147ebed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query57.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_57 ''' - explain shape plan - - - -with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy deleted file mode 100644 index 29451458a6f266..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query58.groovy +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_58 ''' - explain shape plan - - - - -with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - 
,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy deleted file mode 100644 index c8b985a5290cdf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query59.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_59 ''' - explain shape plan - - - -with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales 
sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy deleted file mode 100644 index 8455347f43c105..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query6.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - // TODO: uncomment following line to get better shape - // sql 'set max_join_number_bushy_tree=6' - - qt_ds_shape_6 ''' - explain shape plan - - - - -select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy deleted file mode 100644 index 02e6dedaa3c90f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query60.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_60 ''' - explain shape plan - - - - -with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, 
- date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy deleted file mode 100644 index 035daafadec3da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query61.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_61 ''' - explain shape plan - - - - -select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - 
where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy deleted file mode 100644 index 820da22663fc33..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query62.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_62 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy deleted file mode 100644 index 5873e1c2753d3d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query63.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_63 ''' - explain shape plan - - - - -select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy deleted file mode 100644 index b4566e90f9575c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query64.groovy +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds64 = ''' - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), - cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE 
ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 - group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year - ) - select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt - from cross_sales cs1,cross_sales cs2 - where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip - order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1; - - ''' - - qt_ds_shape_64 'explain shape plan ' + ds64 - -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy deleted file mode 100644 index 0fa05938bcf3d7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query65.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_65 ''' - explain shape plan - - - - -select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy deleted file mode 100644 index 407bb39f6117ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query66.groovy +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Licensed to 
the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_66 ''' - explain shape plan - - - - -select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - 
,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy 
= 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - 
,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when 
d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy deleted file mode 100644 index d337474611f5cb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query67.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_67 ''' - explain shape plan - - - - -select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - 
,sumsales - ,rk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy deleted file mode 100644 index b234cbee914806..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query68.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_68 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100; - - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy deleted file mode 100644 index f1b09b13229f3b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query69.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_69 ''' - explain shape plan - - - - -select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy deleted file mode 100644 index 3e2f47b9a374d1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query7.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_7 ''' - explain shape plan - - - - -select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy deleted file mode 100644 index 5e6196cec5ec14..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query70.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_70 ''' - explain shape plan - - - - -select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) 
as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy deleted file mode 100644 index dedf696d632767..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query71.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_71 ''' - explain shape plan - - - - -select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - ; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy deleted file mode 100644 index 8029b93fe637cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query72.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_72 ''' - explain shape plan - - - - -select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and d3.d_date > d1.d_date + 5 - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100; - - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy deleted file mode 100644 index 4dba313ee28c5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query73.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_73 ''' - explain shape plan - - - - -select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy deleted file mode 100644 index eebd2286ded0a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query74.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_74 ''' - explain shape plan - - - -with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = 
t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy deleted file mode 100644 index f1556bbc51f91f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query75.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_75 ''' - explain shape plan - - - - -WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - 
COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy deleted file mode 100644 index 9410eb8c8341c2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query76.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_76 ''' - explain shape plan - - - - -select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim 
- WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy deleted file mode 100644 index 54b615fd67f852..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query77.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_77 ''' - explain shape plan - - - - -with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between 
cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy deleted file mode 100644 index 6ad5e1a8be9e44..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query78.groovy +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_78 ''' - explain shape plan - - - - -with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), 
-cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy deleted file mode 100644 index fb04d65650dd31..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query79.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_79 ''' - explain shape plan - - - - -select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = 
household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy deleted file mode 100644 index 821ffb2cf236a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query8.groovy +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_8 ''' - explain shape plan - - - - -select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - 
'31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - 
'58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy deleted file mode 100644 index 8ab44b3653b5bf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query80.groovy +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_80 ''' - explain shape plan - - - - -with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between 
cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy deleted file mode 100644 index 5a2fa3ae7b547d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query81.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_81 ''' - explain shape plan - - - - -with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - 
,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy deleted file mode 100644 index 39d87006a1d80f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query82.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_82 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy deleted file mode 100644 index 119fe440d9a756..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query83.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_83 ''' - explain shape plan - - - - -with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where 
d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy deleted file mode 100644 index fb0804cececcaa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query84.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_84 ''' - explain shape plan - - - - -select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy deleted file 
mode 100644 index 517301a858d109..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query85.groovy +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_85 ''' - explain shape plan - - - - -select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, 
date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy deleted file mode 100644 index c2594db8e9e045..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query86.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed 
to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_86 ''' - explain shape plan - - - - -select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - 
and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy deleted file mode 100644 index 51b50c928aa507..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query87.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_87 ''' - explain shape plan - - - - -select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy deleted file mode 100644 index 3e23dc65efdc5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query88.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_88 ''' - explain shape plan - - - - -select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and 
household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - 
(household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - 
from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy deleted file mode 100644 index 070a9145a60f00..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query89.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_89 ''' - explain shape plan - - - - -select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy deleted file mode 100644 index 8afa6c0b913a4c..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query9.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - qt_ds_shape_9 ''' - explain shape plan - - - - -select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select 
avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy deleted file mode 100644 index 359090a313e9cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query90.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_90 ''' - explain shape plan - - - - -select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and 
ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy deleted file mode 100644 index a8d4b4895bf616..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query91.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_91 ''' - explain shape plan - - - - -select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy deleted file mode 100644 index b314dfb25374bf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query92.groovy +++ 
/dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_92 ''' - explain shape plan - - - - -select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM 
- web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy deleted file mode 100644 index d504874817236c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query93.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_93 ''' - explain shape plan - - - - -select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy deleted file mode 100644 index dd66d44f3efb67..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query94.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_94 ''' - explain shape plan - - - - -select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> 
ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy deleted file mode 100644 index 9472af9d687cdc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query95.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=12" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_95 ''' - explain shape plan - - -with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy deleted file mode 100644 index 9973173945983f..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query96.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_96 ''' - explain shape plan - - - - -select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy deleted file mode 100644 index 7038a8179b8f87..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query97.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=false; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - qt_ds_shape_97 ''' - explain shape plan - - - - -with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy deleted file mode 100644 index cb803a7032b4d8..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query98.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_98 ''' - explain shape plan - - - - -select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim 
-where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy deleted file mode 100644 index 0806ca2a7bdecd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/no_stats_shape/query99.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql 'set forbid_unknown_col_stats=false' - sql 'set enable_stats=false' - sql "set runtime_filter_type=8" - sql 'set broadcast_row_count_limit = 30000000' - sql 'set enable_nereids_timeout = false' - sql 'SET enable_pipeline_engine = true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_ds_shape_99 ''' - explain shape plan - - - - -select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100; - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy deleted file mode 100644 index 7b93b538c6a947..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy deleted file mode 100644 index 8ebc3d34f8c92b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query10.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) 
and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy deleted file mode 100644 index 1eba17bdfa959d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query11.groovy +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - 
t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_11 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy deleted file mode 100644 index 3de0eddfeacefb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query12.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy 
deleted file mode 100644 index 4bf64c23d264ea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query13.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - 
and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy deleted file mode 100644 index 29d8d419dd1536..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query14.groovy +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = 
category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - 
having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy deleted file mode 100644 index d2d371da6c95c3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query15.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy deleted file mode 100644 index ba5e1073fccd59..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query16.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - 
qt_ds_shape_16 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy deleted file mode 100644 index 3d6fe36e5b0f89..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query17.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = 
cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy deleted file mode 100644 index ee8f64d80ea204..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query18.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy deleted file mode 100644 index 8461069c183e93..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query19.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy deleted file mode 100644 index 61574ba16f58fa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query2.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - 
sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy deleted file mode 100644 index 321e1d913dc48f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query20.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy 
deleted file mode 100644 index feff18960306b0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query21.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast 
('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy deleted file mode 100644 index bfb87e1402b41d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query22.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy deleted file mode 100644 index c97e2fb303e1f2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query23.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where 
ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy deleted file mode 100644 index cfc582e615208f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query24.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=8 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'beige' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy deleted file mode 100644 index 1227c775eeb559..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query25.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = 
ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy deleted file mode 100644 index e80f67d935c302..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query26.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy deleted file mode 100644 index ac99f5918c6b3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query27.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy deleted file mode 100644 index 6d72e2f2e6142e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query28.groovy +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - 
,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100""" - qt_ds_shape_28 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy deleted file mode 100644 index 76d0424a831ed2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query29.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy deleted file mode 100644 index fdaa33c8b8f68b..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query3.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - 
,brand_id - limit 100""" - qt_ds_shape_3 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy deleted file mode 100644 index a5a28b942da603..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query30.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy deleted file mode 100644 index a9e8e54f2e03cb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query31.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else 
null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase""" - qt_ds_shape_31 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy deleted file mode 100644 index ff511671303007..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query32.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy deleted file mode 100644 index 139221711c9092..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query33.groovy +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - 
and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy deleted file mode 100644 index 2dfb1a3fab0432..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query34.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 """ - explain shape plan - ${ds} - """ -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy deleted file mode 100644 index 7c579674b5bdc2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query35.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - 
limit 100""" - qt_ds_shape_35 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy deleted file mode 100644 index b18d481ad47615..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query36.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy deleted file mode 100644 index af1b5737791539..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query37.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy deleted file mode 100644 index 861df1065d10a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query38.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy deleted file mode 100644 index dde7b5e771220b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query39.groovy +++ /dev/null @@ 
-1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by 
w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy deleted file mode 100644 index 950cc47c083427..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query4.groovy +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - 
,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > 
case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy deleted file mode 100644 index ee2016698d4fc6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query40.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy deleted file mode 100644 index 9e042b7b73e7ad..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query41.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * 
or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 
'Men' and - (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy deleted file mode 100644 index a801f21595be87..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query42.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy deleted file mode 100644 index 65eb573d9f663f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query43.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then 
ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy deleted file mode 100644 index afa0b2691d9fe0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query44.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy 
deleted file mode 100644 index 0655abe5962517..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query45.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', 
'80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy deleted file mode 100644 index 56432e72023569..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query46.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy deleted file mode 100644 index 3cef5880701fc7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query47.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - 
avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy deleted file mode 100644 index 788a32a3d9af4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query48.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy deleted file mode 100644 index 08c2e111116002..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query49.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - 
,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 
- limit 100""" - qt_ds_shape_49 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy deleted file mode 100644 index d2bb54dfc468e3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query5.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - 
cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by 
rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy deleted file mode 100644 index b13bfe4e95837e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query50.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by 
s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy deleted file mode 100644 index 3db2f3a5abb815..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query51.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full 
outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy deleted file mode 100644 index 87393c0825fb46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query52.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy deleted file mode 100644 index 8c249ff3044e8e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query53.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where 
case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy deleted file mode 100644 index 083e4d648d299b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query54.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - 
group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy deleted file mode 100644 index dab3d5e7cde7f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query55.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy deleted file mode 100644 index 2143dcb2dd4795..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query56.groovy +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) 
total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy deleted file mode 100644 index e3e2a1ad57e5b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query57.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_57 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy deleted file mode 100644 index c160d0ada1ba91..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query58.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 
100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy deleted file mode 100644 index 79d68ee081ff36..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query59.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = 
s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy deleted file mode 100644 index 9a18350057d29c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query6.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy deleted file mode 100644 index a778c869418df6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query60.groovy +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and 
d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy deleted file mode 100644 index 3efb6f2051c734..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query61.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy deleted file mode 100644 index d9e8e6a5638e42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query62.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy deleted 
file mode 100644 index ef7bfcb661924c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query63.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in 
(1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy deleted file mode 100644 index d0c4ada0d677ae..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query64.groovy +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - 
ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy deleted file mode 100644 
index 13ff66a236f908..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query65.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 
1221+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy deleted file mode 100644 index 3ad2fbecfa9cb7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query66.groovy +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net 
- ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) 
as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then 
cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - 
,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100""" - qt_ds_shape_66 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy deleted file mode 100644 index 60725af0b19fce..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query67.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy deleted file mode 100644 index c2d21e2d218559..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query68.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and 
store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy deleted file mode 100644 index 4ce0907dd769b2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query69.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy deleted file mode 100644 index 015f3504c6f0a5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query7.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy deleted file mode 100644 index 34f11394e63dcf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query70.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and 
d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy deleted file mode 100644 index a6bd9c8884d29b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query71.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy deleted 
file mode 100644 index 6e7360ed7ddfd3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query72.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join 
inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - qt_ds_shape_72 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy deleted file mode 100644 index 5b02c9a994842e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query73.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker 
County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy deleted file mode 100644 index 06cd139cb4e834..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query74.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and 
t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100""" - qt_ds_shape_74 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy deleted file mode 100644 index 9397c42c127d46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query75.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON 
d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy deleted file mode 100644 index 9b0c5ca496c1b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query76.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY 
channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy deleted file mode 100644 index 72f7a552e13b3d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query77.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as 
- ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy deleted file mode 100644 index aa17385358d0d7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query78.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - 
from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy deleted file mode 100644 index 34f613e0974752..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query79.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in 
(1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100""" - qt_ds_shape_79 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy deleted file mode 100644 index 5fbbac6e3d11fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query8.groovy +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - 
'33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - 
'16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy deleted file mode 100644 index 6fadaafdb42a4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query80.groovy +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = 
cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy deleted file mode 100644 index c7233532657d5a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query81.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select 
avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy deleted file mode 100644 index 94ddfc873b1053..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query82.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy deleted file mode 100644 index d4696ed1a2350e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query83.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - 
item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy deleted file mode 100644 index 1cea65929d1ea0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query84.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy deleted file mode 100644 index 5455c49f9d813c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query85.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - 
cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy deleted file mode 100644 index 9ba88449ca98f7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query86.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy deleted file mode 100644 index 
391627f33adaac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query87.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from 
catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -""" - qt_ds_shape_87 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy deleted file mode 100644 index f7a330a6963eea..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query88.groovy +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, 
household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = 
s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy deleted file mode 100644 index b0f23ecd8a66b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query89.groovy +++ /dev/null 
@@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - 
i_class in ('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy deleted file mode 100644 index 76f5a7ed84c454..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query9.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where 
ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy deleted file mode 100644 index 5023a13e1f25dc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query90.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy deleted file mode 100644 index 199e374915a32b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query91.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 
-and d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy deleted file mode 100644 index a8d060e557c4cd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query92.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy deleted file mode 100644 index fcbb8872980f5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query93.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy deleted file mode 100644 index 81e63de5b22b05..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query94.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy deleted file mode 100644 index be8b36d1c9c253..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query95.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = 
ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy deleted file mode 100644 index c3d1721650f207..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query96.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy deleted file mode 100644 index 5ea649b1046797..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query97.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=true; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci 
full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy deleted file mode 100644 index ac78b3adcf5da7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query98.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy deleted file mode 100644 index 1e867ca4987f0f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/rf_prune/query99.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set runtime_filter_type=8' - sql 'set enable_runtime_filter_prune=true' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - 
,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy deleted file mode 100644 index cdaa360eafaacd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query1.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'SD' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy deleted file mode 100644 index 7e492e1ec8cec8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query10.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Storey County','Marquette County','Warren County','Cochran County','Kandiyohi County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3) 
and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 ANd 1+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 1 and 1+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy deleted file mode 100644 index ab874a3853f8da..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query11.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - 
t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 2001 - and t_s_secyear.dyear = 2001+1 - and t_w_firstyear.dyear = 2001 - and t_w_secyear.dyear = 2001+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_11 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy deleted file mode 100644 index 6fc3a9149599d4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query12.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Books', 'Sports', 'Men') - and ws_sold_date_sk = d_date_sk - and d_date between cast('1998-04-06' as date) - and (cast('1998-04-06' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy deleted file 
mode 100644 index 152c4132629c63..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query13.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - 
and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Unknown' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'S' - and cd_education_status = 'College' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = '4 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('SD', 'KS', 'MI') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MO', 'ND', 'CO') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('NH', 'OH', 'TX') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy deleted file mode 100644 index c621b72b18c4e4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query14.groovy +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 2000 AND 2000 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 2000 AND 2000 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 2000 AND 2000 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = 
category_id -), - avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 2000 and 2000 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - 
having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy deleted file mode 100644 index 48dc7923ebaa3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query15.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy deleted file mode 100644 index 16103efe13173f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query16.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'WV' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Ziebach County','Luce County','Richland County','Daviess County', - 'Barrow County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - 
qt_ds_shape_16 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy deleted file mode 100644 index 5cafc500be9674..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query17.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk 
- and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy deleted file mode 100644 index 42bc9b85e8a581..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query18.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Advanced Degree' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (10,7,8,4,1,2) and - d_year = 1998 and - ca_state in ('WA','GA','NC' - ,'ME','WY','OK','IN') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy deleted file mode 100644 index 102142a9975b04..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query19.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=2 - and d_moy=12 - and d_year=1999 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy deleted file mode 100644 index c8afcaca91ae45..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query2.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case 
when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy deleted file mode 100644 index e64fd0a4d64957..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query20.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Shoes', 'Books', 'Women') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-01-26' as date) - and (cast('2002-01-26' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy deleted file 
mode 100644 index 8c0d46200b8da3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query21.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2002-02-27' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('2002-02-27' as date)) - 
then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('2002-02-27' as date) - interval 30 day) - and (cast ('2002-02-27' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy deleted file mode 100644 index ab0afa82a7f8b1..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query22.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1188 and 1188 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy deleted file mode 100644 index b09ccf2079a525..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query23.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), - best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk 
= c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 5 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy deleted file mode 100644 index 7f8d9159cd3ab2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query24.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_profit) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=8 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'beige' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy deleted file mode 100644 index 7d6b7930cd82f2..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query25.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,sum(ss_net_profit) as store_sales_profit - ,sum(sr_net_loss) as store_returns_loss - ,sum(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 2000 - and d1.d_date_sk = 
ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 2000 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 2000 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy deleted file mode 100644 index a269a64d6600b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query26.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'S' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy deleted file mode 100644 index e02f0e1baf82f2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query27.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'F' and - cd_marital_status = 'D' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('MO','AL', 'MI', 'TN', 'LA', 'SC') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy deleted file mode 100644 index cbd3938230d209..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query28.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 131 and 131+10 - or ss_coupon_amt between 16798 and 16798+1000 - or ss_wholesale_cost between 25 and 25+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 145 and 145+10 - or ss_coupon_amt between 14792 and 14792+1000 - or ss_wholesale_cost between 46 and 46+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 150 and 150+10 - or ss_coupon_amt between 6600 and 6600+1000 - or ss_wholesale_cost between 9 and 9+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 91 and 91+10 - or ss_coupon_amt between 13493 and 13493+1000 - or ss_wholesale_cost between 36 and 36+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - 
,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 0 and 0+10 - or ss_coupon_amt between 7629 and 7629+1000 - or ss_wholesale_cost between 6 and 6+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 15257 and 15257+1000 - or ss_wholesale_cost between 31 and 31+20)) B6 -limit 100""" - qt_ds_shape_28 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy deleted file mode 100644 index a8c46eb13c7b70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query29.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,avg(ss_quantity) as store_sales_quantity - ,avg(sr_return_quantity) as store_returns_quantity - ,avg(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1999,1999+1,1999+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy deleted file mode 100644 index 2030616e958181..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query3.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - 
,brand_id - limit 100""" - qt_ds_shape_3 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy deleted file mode 100644 index 301ac521b84480..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query30.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2002 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'IN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy deleted file mode 100644 index 607dc93051d195..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query31.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 2000 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 2000 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 2000 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 2000 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 2000 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =2000 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else 
null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by web_q1_q2_increase""" - qt_ds_shape_31 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy deleted file mode 100644 index e90fa68b73443a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query32.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 29 -and i_item_sk = cs_item_sk -and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '1999-01-07' and - (cast('1999-01-07' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy deleted file mode 100644 index b09523f54b7d46..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query33.groovy +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - 
and d_year = 2002 - and d_moy = 1 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Home')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2002 - and d_moy = 1 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy deleted file mode 100644 index 6e61686781f9ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query34.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_county in ('Ziebach County','Daviess County','Walker County','Richland County', - 'Barrow County','Franklin Parish','Williamson County','Luce County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 """ - explain shape plan - ${ds} - """ -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy deleted file mode 100644 index 0359da335facba..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query35.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - max(cd_dep_count), - sum(cd_dep_count), - max(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - max(cd_dep_employed_count), - sum(cd_dep_employed_count), - max(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - max(cd_dep_college_count), - sum(cd_dep_college_count), - max(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - 
limit 100""" - qt_ds_shape_35 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy deleted file mode 100644 index 80a114afc269a8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query36.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2002 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('SD','TN','GA','SC', - 'MO','AL','MI','OH') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy deleted file mode 100644 index e5f353ab44a134..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query37.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 45 and 45 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-02-21' as date) and (cast('1999-02-21' as date) + interval 60 day) - and i_manufact_id in (856,707,1000,747) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy deleted file mode 100644 index 7a7c71813664ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query38.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1183 and 1183 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy deleted file mode 100644 index 55c7135fca9e8d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query39.groovy +++ /dev/null @@ -1,67 +0,0 
@@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =1998 - group by 
w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy deleted file mode 100644 index 969981b25f7cac..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query4.groovy +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - 
,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > 
case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy deleted file mode 100644 index 8d261843553c84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query40.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-04-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-04-02' as date) - interval 30 day) - and (cast ('2001-04-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy deleted file mode 100644 index 68ea4200724d18..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query41.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 748 and 748+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'gainsboro' or i_color = 'aquamarine') and - (i_units = 'Ounce' or i_units = 'Dozen') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'chiffon' or i_color = 'violet') and - (i_units = 'Ton' or i_units = 'Pound') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and 
- (i_color = 'chartreuse' or i_color = 'blue') and - (i_units = 'Each' or i_units = 'Oz') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'tan' or i_color = 'dodger') and - (i_units = 'Bunch' or i_units = 'Tsp') and - (i_size = 'medium' or i_size = 'economy') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'blanched' or i_color = 'tomato') and - (i_units = 'Tbl' or i_units = 'Case') and - (i_size = 'medium' or i_size = 'economy') - ) or - (i_category = 'Women' and - (i_color = 'almond' or i_color = 'lime') and - (i_units = 'Box' or i_units = 'Dram') and - (i_size = 'extra large' or i_size = 'small') - ) or - (i_category = 'Men' and - (i_color = 'peru' or i_color = 'saddle') and - (i_units = 'Pallet' or i_units = 'Gram') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'indian' or i_color = 'spring') and - (i_units = 'Unknown' or i_units = 'Carton') and - (i_size = 'medium' or i_size = 'economy') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy deleted file mode 100644 index df1baef595771e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query42.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=2002 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy deleted file mode 100644 index fdc21823808f4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query43.groovy +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price 
else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy deleted file mode 100644 index 085ef7323dc884..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query44.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 146 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 146 - and ss_addr_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy deleted 
file mode 100644 index a8d4bc2bff2d7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query45.groovy +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', 
'81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy deleted file mode 100644 index 13edc5bad59a57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query46.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 6 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (1999,1999+1,1999+2) - and store.s_city in ('Five Points','Centerville','Oak Grove','Fairview','Liberty') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy deleted file mode 100644 index 63b6ee07cbee57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query47.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - 
avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy deleted file mode 100644 index c8594b7fd10cf7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query48.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 1999 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'U' - and - cd_education_status = 'Primary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'W' - and - cd_education_status = 'College' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'MN', 'IA') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('VA', 'IL', 'TX') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MI', 'WI', 'IN') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 """ - explain shape plan - ${ds} - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy deleted file mode 100644 index 08a1e2ca819b96..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query49.groovy +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - 
,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1999 - and d_moy = 12 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 
- limit 100""" - qt_ds_shape_49 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy deleted file mode 100644 index 43524c4c6e8a28..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query5.groovy +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - 
cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by 
rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy deleted file mode 100644 index fa5bd0908d6201..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query50.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by 
s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy deleted file mode 100644 index 7a00b2b7a52de4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query51.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1216 and 1216+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full 
outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy deleted file mode 100644 index aca150ca790e5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query52.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2002 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy deleted file mode 100644 index 62240282e24579..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query53.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1200,1200+1,1200+2,1200+3,1200+4,1200+5,1200+6,1200+7,1200+8,1200+9,1200+10,1200+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where 
case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy deleted file mode 100644 index 0d7fb531304d95..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query54.groovy +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Women' - and i_class = 'maternity' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 5 - and d_year = 1998 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1998 and d_moy = 5) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1998 and d_moy = 5) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - 
group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy deleted file mode 100644 index 37af78fb24bf3c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query55.groovy +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=100 - and d_moy=12 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy deleted file mode 100644 index b96a95094fe147..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query56.groovy +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) 
total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','green','cyan')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 2 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy deleted file mode 100644 index a258a524731224..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query57.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 1999 or - ( d_year = 1999-1 and d_moy =12) or - ( d_year = 1999+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_brand - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 1999 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_57 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy deleted file mode 100644 index 0ebb6288536ae8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query58.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-03-24')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 
100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy deleted file mode 100644 index 15005ea7f1638a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query59.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = 
s_store_sk and - d_month_seq between 1196 and 1196 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - ss_store_sk = s_store_sk and - d_month_seq between 1196+ 12 and 1196 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy deleted file mode 100644 index 54b0e9c129fd72..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query6.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy deleted file mode 100644 index d980e0cbc3a1b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query60.groovy +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - 
and d_moy = 8 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Children')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 8 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -7 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy deleted file mode 100644 index 0ea2596fdf01be..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query61.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Jewelry' - and s_gmt_offset = -7 - and d_year = 1999 - and d_moy = 11) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy deleted file mode 100644 index 95fe33f4c608c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query62.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy deleted file 
mode 100644 index 367ee302c10a70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query63.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in 
(1181,1181+1,1181+2,1181+3,1181+4,1181+5,1181+6,1181+7,1181+8,1181+9,1181+10,1181+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy deleted file mode 100644 index 5644db2d92c867..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query64.groovy +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - 
ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('blanched','medium','brown','chocolate','burlywood','drab') and - i_current_price between 23 and 23 + 10 and - i_current_price between 23 + 1 and 23 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 2001 and - cs2.syear = 2001 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy deleted file mode 100644 index 
9f5108267560a3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query65.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - 
group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1221 and 1221+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy deleted file mode 100644 index e9b0f8c7e89f05..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query66.groovy +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as 
mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship_tax * ws_quantity else 
0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 and 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('GREAT EASTERN ', ','), ' LATVIAN') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then 
cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid_inc_ship_tax * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 1998 - and t_time between 48821 AND 48821+28800 - and sm_carrier in ('GREAT EASTERN','LATVIAN') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - 
,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100""" - qt_ds_shape_66 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy deleted file mode 100644 index bba4022c805c07..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query67.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1206 and 1206+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy deleted file mode 100644 index 08bcc07785d10d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query68.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and 
store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= -1) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Pleasant Hill','Five Points') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy deleted file mode 100644 index 1122aa1716cd53..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query69.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('TX','VA','MI') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2000 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy deleted file mode 100644 index 0031a3088da690..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query7.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy deleted file mode 100644 index d79b36bec668a6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query70.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1213 and 1213+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1213 and 1213+11 - and 
d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy deleted file mode 100644 index 480c77aaffce75..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query71.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=1998 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=1998 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy deleted file 
mode 100644 index 73b574b9fdd0dc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query72.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select /*+ SET_VAR(max_join_number_bushy_tree=10, memo_max_group_expression_size=15000)*/ i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on 
(cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '501-1000' - and d1.d_year = 2002 - and cd_marital_status = 'W' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" - qt_ds_shape_72 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy deleted file mode 100644 index 19d399ee7ea84b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query73.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '501-1000' or - household_demographics.hd_buy_potential = 'Unknown') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Fairfield County','Walker 
County','Daviess County','Barrow County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy deleted file mode 100644 index 65c65d66bcf294..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query74.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,stddev_samp(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and 
t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 2,1,3 -limit 100""" - qt_ds_shape_74 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy deleted file mode 100644 index 0afdc93abf33f8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query75.groovy +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Home' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim 
ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Home') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=1999 - AND prev_yr.d_year=1999-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy deleted file mode 100644 index 52b75d1628bfd7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query76.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_hdemo_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_hdemo_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_bill_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_bill_addr_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_warehouse_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_warehouse_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY 
channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy deleted file mode 100644 index 22483589531b2d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query77.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws 
as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('1998-08-05' as date) - and (cast('1998-08-05' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy deleted file mode 100644 index eee40a4fc03dfc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query78.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp 
- from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null - group by d_year, ss_item_sk, ss_customer_sk - ) - select -ss_item_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2000 -order by - ss_item_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy deleted file mode 100644 index 45bd5452c078c6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query79.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 5 or household_demographics.hd_vehicle_count > 4) - and date_dim.d_dow = 1 - and date_dim.d_year in 
(1998,1998+1,1998+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100""" - qt_ds_shape_79 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy deleted file mode 100644 index e44e47a69e9fdd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query8.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - 
'33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - 
'16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy deleted file mode 100644 index a8924221229cbf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query80.groovy +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and cs_catalog_page_sk = 
cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('1998-08-28' as date) - and (cast('1998-08-28' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy deleted file mode 100644 index 85088a4553fa7c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query81.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2002 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select 
avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'CA' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy deleted file mode 100644 index 1663dcdb3ee7b9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query82.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 17 and 17+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('1999-07-09' as date) and (cast('1999-07-09' as date) + interval 60 day) - and i_manufact_id in (639,169,138,339) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy deleted file mode 100644 index a8fd8561e20a09..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query83.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - 
item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-06-06','2001-09-02','2001-11-11'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy deleted file mode 100644 index 01433c96e96718..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query84.groovy +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Oakwood' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 5806 - and ib_upper_bound <= 5806 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy deleted file mode 100644 index 858a94cd81d36b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query85.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 2000 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'M' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = '4 yr Degree' - and - 
cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Secondary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'W' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('FL', 'TX', 'DE') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('IN', 'ND', 'ID') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('MT', 'IL', 'OH') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy deleted file mode 100644 index 71e726382da63e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query86.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1224 and 1224+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy deleted file mode 100644 index 
52e6ab0b62425c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query87.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from 
catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1184 and 1184+11) -) cool_cust -""" - qt_ds_shape_87 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy deleted file mode 100644 index dcdeb6a1ac4386..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query88.groovy +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, 
household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = 
s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy deleted file mode 100644 index 9ddbe3cf68ce57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query89.groovy +++ /dev/null @@ -1,68 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (1999) and - ((i_category in ('Jewelry','Shoes','Electronics') and - i_class in 
('semi-precious','athletic','portable') - ) - or (i_category in ('Men','Music','Women') and - i_class in ('accessories','rock','maternity') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy deleted file mode 100644 index df25c356c66e21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query9.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 2972190 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 4505785 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 1575726 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 3188917 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where 
ss_quantity between 81 and 100) > 3525216 - then (select avg(ss_ext_sales_price) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_profit) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy deleted file mode 100644 index 0f5cde4e25e30a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query90.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 10 and 10+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 16 and 16+1 - and household_demographics.hd_dep_count = 2 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy deleted file mode 100644 index 3c706057a02f5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query91.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software 
Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2001 -and 
d_moy = 11 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like '1001-5000%' -and ca_gmt_offset = -6 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy deleted file mode 100644 index 1013519e91975e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query92.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 320 -and i_item_sk = ws_item_sk -and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2002-02-26' and - (cast('2002-02-26' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy deleted file mode 100644 index 53d1d63184839e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query93.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'duplicate purchase') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy deleted file mode 100644 index 4888bb535c6dc0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query94.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2000-2-01' and - (cast('2000-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy deleted file mode 100644 index 4f1712c93fd90b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query95.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=12' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '1999-2-01' and - (cast('1999-2-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk 
= ca_address_sk -and ca_state = 'NC' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy deleted file mode 100644 index 724e1a1f171702..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query96.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 3 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy deleted file mode 100644 index 0a4c7627524edb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query97.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1214 and 1214 + 11 -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci 
full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy deleted file mode 100644 index 9a401b0184eb3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query98.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Sports', 'Music', 'Shoes') - and ss_sold_date_sk = d_date_sk - and d_date between cast('2002-05-20' as date) - and (cast('2002-05-20' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy deleted file mode 100644 index 6c16d02330fb8e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/query99.groovy +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - 
,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1224 and 1224 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 """ - explain shape plan - ${ds} - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy deleted file mode 100644 index fc537b63380ef0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf100/shape/tpcds_sf100_stats.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -suite('tpcds_sf100_stats') { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - def stats - stats = sql """ show column stats call_center ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_page ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_returns ;""" - logger.info("${stats}") - stats = sql """ show column stats catalog_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats customer ;""" - logger.info("${stats}") - stats = sql """ show column stats customer_address ;""" - logger.info("${stats}") - stats = sql """ show column stats customer_demographics ;""" - logger.info("${stats}") - stats = sql """ show column stats date_dim ;""" - logger.info("${stats}") - stats = sql """ show column stats dbgen_version ;""" - logger.info("${stats}") - stats = sql """ show column stats household_demographics ;""" - logger.info("${stats}") - stats = sql """ show column stats income_band ;""" - logger.info("${stats}") - stats = sql """ show column stats inventory ;""" - logger.info("${stats}") - stats = sql """ show column stats item ;""" - logger.info("${stats}") - stats = sql """ show column stats promotion ;""" - logger.info("${stats}") - stats = sql """ show column stats reason ;""" - logger.info("${stats}") - stats = sql """ show column stats ship_mode ;""" - logger.info("${stats}") - stats = sql """ show column stats store ;""" - logger.info("${stats}") - stats = sql """ show column stats store_returns ;""" - logger.info("${stats}") - stats = sql """ show column stats store_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats time_dim ;""" - logger.info("${stats}") - stats = sql """ show column stats warehouse ;""" - logger.info("${stats}") - stats = sql """ show column stats web_page ;""" - logger.info("${stats}") - stats = sql """ show column stats web_returns ;""" - logger.info("${stats}") - 
stats = sql """ show column stats web_sales ;""" - logger.info("${stats}") - stats = sql """ show column stats web_site ;""" - logger.info("${stats}") - -} \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy deleted file mode 100644 index f7acdd26eb8690..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query13.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 
'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 ''' - explain shape plan - select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy deleted file mode 100644 index 17466ecacf346e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query19.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - 
,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy deleted file mode 100644 index d42411e0c011d2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query44.groovy +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 ''' - explain shape plan - select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by 
rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy deleted file mode 100644 index 9a0eb67545c428..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query45.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 ''' - explain shape plan - select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk 
- and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy deleted file mode 100644 index 198b39b8583407..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query54.groovy +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 
as segment_base - from segments - group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 ''' - explain shape plan - with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy deleted file mode 100644 index c1dd7cbd87be1b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query56.groovy +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in 
(select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 ''' - explain shape plan - with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = 
i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy deleted file mode 100644 index 80639ff76d3720..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query6.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 ''' - explain shape plan - select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy deleted file mode 100644 index d34a2c3fb719c5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query61.groovy +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 ''' - explain shape plan - select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select 
sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy deleted file mode 100644 index 2477c21b67ce2d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query68.groovy +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in 
('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy deleted file mode 100644 index 33e361246ba77c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query8.groovy +++ /dev/null @@ -1,252 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * 
or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - 
'73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - 
'36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 ''' - explain shape plan - select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - 
'21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - 
'72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy deleted file mode 100644 index 
0502b8403fb321..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query91.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = 
cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 ''' - explain shape plan - select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy deleted file mode 100644 index b4e6f86e2c50bd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/bs_downgrade_shape/query95.groovy +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = 
d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 ''' - explain shape plan - with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py b/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py deleted file mode 100644 index 8317bd1859f261..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/gen_shape.py +++ /dev/null @@ -1,26 +0,0 @@ -# // Licensed to the Apache Software Foundation (ASF) under one -# // or more contributor license agreements. See the NOTICE file -# // distributed with this work for additional information -# // regarding copyright ownership. 
The ASF licenses this file -# // to you under the Apache License, Version 2.0 (the -# // "License"); you may not use this file except in compliance -# // with the License. You may obtain a copy of the License at -# // -# // http://www.apache.org/licenses/LICENSE-2.0 -# // -# // Unless required by applicable law or agreed to in writing, -# // software distributed under the License is distributed on an -# // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# // KIND, either express or implied. See the License for the -# // specific language governing permissions and limitations -# // under the License. -if __name__ == '__main__': - with open('shape.tmpl', 'r') as f: - tmpl = f.read() - for i in range(1,100): - with open('../../../../tools/tpcds-tools/queries/sf1000/query'+str(i)+'.sql', 'r') as fi: - casei = tmpl.replace('{--}', str(i)) - casei = casei.replace('{query}', fi.read().split(";")[0]) - - with open('../shape/query'+str(i)+'.groovy', 'w') as out: - out.write(casei) \ No newline at end of file diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl b/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl deleted file mode 100644 index c25fd3f36b03f4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/ddl/shape.tmpl +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query{--}") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - def ds = """{query}""" - qt_ds_shape_{--} ''' - explain shape plan - {query} - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy deleted file mode 100644 index aada0585602b1d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/eliminate_empty/query10_empty.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - multi_sql """ - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set parallel_fragment_exec_instance_num=8; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=true; - set enable_nereids_timeout = false; - set enable_runtime_filter_prune=false; - set runtime_filter_type=8; - set dump_nereids_memo=false; - set disable_nereids_rules=PRUNE_EMPTY_PARTITION; - """ - - qt_ds_shape_10 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year 
= 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy deleted file mode 100644 index 14c11d3ea846f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/load.groovy +++ /dev/null @@ -1,2548 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - - sql ''' - drop table if exists customer_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_demographics ( - cd_demo_sk bigint not null, - cd_gender char(1), - cd_marital_status char(1), - cd_education_status char(20), - cd_purchase_estimate integer, - cd_credit_rating char(10), - cd_dep_count integer, - cd_dep_employed_count integer, - cd_dep_college_count integer - ) - DUPLICATE KEY(cd_demo_sk) - DISTRIBUTED BY HASH(cd_gender) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists reason - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS reason ( - r_reason_sk bigint not null, - r_reason_id char(16) not null, - r_reason_desc char(100) - ) - DUPLICATE KEY(r_reason_sk) - DISTRIBUTED BY HASH(r_reason_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists date_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS date_dim ( - d_date_sk bigint not null, - d_date_id char(16) not null, - d_date datev2, - d_month_seq integer, - d_week_seq integer, - d_quarter_seq integer, - d_year integer, - d_dow integer, - d_moy integer, - d_dom integer, - d_qoy integer, - d_fy_year integer, - d_fy_quarter_seq integer, - d_fy_week_seq integer, - d_day_name char(9), - d_quarter_name char(6), - d_holiday char(1), - d_weekend char(1), - d_following_holiday char(1), - d_first_dom integer, - d_last_dom integer, - d_same_day_ly integer, - d_same_day_lq integer, - d_current_day char(1), - d_current_week char(1), - d_current_month char(1), - d_current_quarter char(1), - d_current_year char(1) - ) - DUPLICATE KEY(d_date_sk) - DISTRIBUTED BY HASH(d_date_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists warehouse - ''' - - sql 
''' - CREATE TABLE IF NOT EXISTS warehouse ( - w_warehouse_sk bigint not null, - w_warehouse_id char(16) not null, - w_warehouse_name varchar(20), - w_warehouse_sq_ft integer, - w_street_number char(10), - w_street_name varchar(60), - w_street_type char(15), - w_suite_number char(10), - w_city varchar(60), - w_county varchar(30), - w_state char(2), - w_zip char(10), - w_country varchar(20), - w_gmt_offset decimalv3(5,2) - ) - DUPLICATE KEY(w_warehouse_sk) - DISTRIBUTED BY HASH(w_warehouse_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_sales ( - cs_sold_date_sk bigint, - cs_item_sk bigint not null, - cs_order_number bigint not null, - cs_sold_time_sk bigint, - cs_ship_date_sk bigint, - cs_bill_customer_sk bigint, - cs_bill_cdemo_sk bigint, - cs_bill_hdemo_sk bigint, - cs_bill_addr_sk bigint, - cs_ship_customer_sk bigint, - cs_ship_cdemo_sk bigint, - cs_ship_hdemo_sk bigint, - cs_ship_addr_sk bigint, - cs_call_center_sk bigint, - cs_catalog_page_sk bigint, - cs_ship_mode_sk bigint, - cs_warehouse_sk bigint, - cs_promo_sk bigint, - cs_quantity integer, - cs_wholesale_cost decimalv3(7,2), - cs_list_price decimalv3(7,2), - cs_sales_price decimalv3(7,2), - cs_ext_discount_amt decimalv3(7,2), - cs_ext_sales_price decimalv3(7,2), - cs_ext_wholesale_cost decimalv3(7,2), - cs_ext_list_price decimalv3(7,2), - cs_ext_tax decimalv3(7,2), - cs_coupon_amt decimalv3(7,2), - cs_ext_ship_cost decimalv3(7,2), - cs_net_paid decimalv3(7,2), - cs_net_paid_inc_tax decimalv3(7,2), - cs_net_paid_inc_ship decimalv3(7,2), - cs_net_paid_inc_ship_tax decimalv3(7,2), - cs_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(cs_sold_date_sk, cs_item_sk) - DISTRIBUTED BY HASH(cs_item_sk, cs_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists call_center - ''' - - sql ''' - CREATE TABLE IF NOT 
EXISTS call_center ( - cc_call_center_sk bigint not null, - cc_call_center_id char(16) not null, - cc_rec_start_date datev2, - cc_rec_end_date datev2, - cc_closed_date_sk integer, - cc_open_date_sk integer, - cc_name varchar(50), - cc_class varchar(50), - cc_employees integer, - cc_sq_ft integer, - cc_hours char(20), - cc_manager varchar(40), - cc_mkt_id integer, - cc_mkt_class char(50), - cc_mkt_desc varchar(100), - cc_market_manager varchar(40), - cc_division integer, - cc_division_name varchar(50), - cc_company integer, - cc_company_name char(50), - cc_street_number char(10), - cc_street_name varchar(60), - cc_street_type char(15), - cc_suite_number char(10), - cc_city varchar(60), - cc_county varchar(30), - cc_state char(2), - cc_zip char(10), - cc_country varchar(20), - cc_gmt_offset decimalv3(5,2), - cc_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(cc_call_center_sk) - DISTRIBUTED BY HASH(cc_call_center_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists inventory - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS inventory ( - inv_date_sk bigint not null, - inv_item_sk bigint not null, - inv_warehouse_sk bigint, - inv_quantity_on_hand integer - ) - DUPLICATE KEY(inv_date_sk, inv_item_sk, inv_warehouse_sk) - DISTRIBUTED BY HASH(inv_date_sk, inv_item_sk, inv_warehouse_sk) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS catalog_returns ( - cr_item_sk bigint not null, - cr_order_number bigint not null, - cr_returned_date_sk bigint, - cr_returned_time_sk bigint, - cr_refunded_customer_sk bigint, - cr_refunded_cdemo_sk bigint, - cr_refunded_hdemo_sk bigint, - cr_refunded_addr_sk bigint, - cr_returning_customer_sk bigint, - cr_returning_cdemo_sk bigint, - cr_returning_hdemo_sk bigint, - cr_returning_addr_sk bigint, - cr_call_center_sk bigint, - cr_catalog_page_sk bigint, - cr_ship_mode_sk bigint, - 
cr_warehouse_sk bigint, - cr_reason_sk bigint, - cr_return_quantity integer, - cr_return_amount decimalv3(7,2), - cr_return_tax decimalv3(7,2), - cr_return_amt_inc_tax decimalv3(7,2), - cr_fee decimalv3(7,2), - cr_return_ship_cost decimalv3(7,2), - cr_refunded_cash decimalv3(7,2), - cr_reversed_charge decimalv3(7,2), - cr_store_credit decimalv3(7,2), - cr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(cr_item_sk, cr_order_number) - DISTRIBUTED BY HASH(cr_item_sk, cr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "catalog" - ) - ''' - - sql ''' - drop table if exists household_demographics - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS household_demographics ( - hd_demo_sk bigint not null, - hd_income_band_sk bigint, - hd_buy_potential char(15), - hd_dep_count integer, - hd_vehicle_count integer - ) - DUPLICATE KEY(hd_demo_sk) - DISTRIBUTED BY HASH(hd_demo_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer_address - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer_address ( - ca_address_sk bigint not null, - ca_address_id char(16) not null, - ca_street_number char(10), - ca_street_name varchar(60), - ca_street_type char(15), - ca_suite_number char(10), - ca_city varchar(60), - ca_county varchar(30), - ca_state char(2), - ca_zip char(10), - ca_country varchar(20), - ca_gmt_offset decimalv3(5,2), - ca_location_type char(20) - ) - DUPLICATE KEY(ca_address_sk) - DISTRIBUTED BY HASH(ca_address_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists income_band - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS income_band ( - ib_income_band_sk bigint not null, - ib_lower_bound integer, - ib_upper_bound integer - ) - DUPLICATE KEY(ib_income_band_sk) - DISTRIBUTED BY HASH(ib_income_band_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists catalog_page - ''' - - sql ''' - CREATE TABLE IF 
NOT EXISTS catalog_page ( - cp_catalog_page_sk bigint not null, - cp_catalog_page_id char(16) not null, - cp_start_date_sk integer, - cp_end_date_sk integer, - cp_department varchar(50), - cp_catalog_number integer, - cp_catalog_page_number integer, - cp_description varchar(100), - cp_type varchar(100) - ) - DUPLICATE KEY(cp_catalog_page_sk) - DISTRIBUTED BY HASH(cp_catalog_page_sk) BUCKETS 3 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists item - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS item ( - i_item_sk bigint not null, - i_item_id char(16) not null, - i_rec_start_date datev2, - i_rec_end_date datev2, - i_item_desc varchar(200), - i_current_price decimalv3(7,2), - i_wholesale_cost decimalv3(7,2), - i_brand_id integer, - i_brand char(50), - i_class_id integer, - i_class char(50), - i_category_id integer, - i_category char(50), - i_manufact_id integer, - i_manufact char(50), - i_size char(20), - i_formulation char(20), - i_color char(20), - i_units char(10), - i_container char(10), - i_manager_id integer, - i_product_name char(50) - ) - DUPLICATE KEY(i_item_sk) - DISTRIBUTED BY HASH(i_item_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_returns ( - wr_item_sk bigint not null, - wr_order_number bigint not null, - wr_returned_date_sk bigint, - wr_returned_time_sk bigint, - wr_refunded_customer_sk bigint, - wr_refunded_cdemo_sk bigint, - wr_refunded_hdemo_sk bigint, - wr_refunded_addr_sk bigint, - wr_returning_customer_sk bigint, - wr_returning_cdemo_sk bigint, - wr_returning_hdemo_sk bigint, - wr_returning_addr_sk bigint, - wr_web_page_sk bigint, - wr_reason_sk bigint, - wr_return_quantity integer, - wr_return_amt decimalv3(7,2), - wr_return_tax decimalv3(7,2), - wr_return_amt_inc_tax decimalv3(7,2), - wr_fee decimalv3(7,2), - wr_return_ship_cost decimalv3(7,2), - wr_refunded_cash decimalv3(7,2), - 
wr_reversed_charge decimalv3(7,2), - wr_account_credit decimalv3(7,2), - wr_net_loss decimalv3(7,2) - ) - DUPLICATE KEY(wr_item_sk, wr_order_number) - DISTRIBUTED BY HASH(wr_item_sk, wr_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists web_site - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_site ( - web_site_sk bigint not null, - web_site_id char(16) not null, - web_rec_start_date datev2, - web_rec_end_date datev2, - web_name varchar(50), - web_open_date_sk bigint, - web_close_date_sk bigint, - web_class varchar(50), - web_manager varchar(40), - web_mkt_id integer, - web_mkt_class varchar(50), - web_mkt_desc varchar(100), - web_market_manager varchar(40), - web_company_id integer, - web_company_name char(50), - web_street_number char(10), - web_street_name varchar(60), - web_street_type char(15), - web_suite_number char(10), - web_city varchar(60), - web_county varchar(30), - web_state char(2), - web_zip char(10), - web_country varchar(20), - web_gmt_offset decimalv3(5,2), - web_tax_percentage decimalv3(5,2) - ) - DUPLICATE KEY(web_site_sk) - DISTRIBUTED BY HASH(web_site_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists promotion - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS promotion ( - p_promo_sk bigint not null, - p_promo_id char(16) not null, - p_start_date_sk bigint, - p_end_date_sk bigint, - p_item_sk bigint, - p_cost decimalv3(15,2), - p_response_targe integer, - p_promo_name char(50), - p_channel_dmail char(1), - p_channel_email char(1), - p_channel_catalog char(1), - p_channel_tv char(1), - p_channel_radio char(1), - p_channel_press char(1), - p_channel_event char(1), - p_channel_demo char(1), - p_channel_details varchar(100), - p_purpose char(15), - p_discount_active char(1) - ) - DUPLICATE KEY(p_promo_sk) - DISTRIBUTED BY HASH(p_promo_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - 
drop table if exists web_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_sales ( - ws_sold_date_sk bigint, - ws_item_sk bigint not null, - ws_order_number bigint not null, - ws_sold_time_sk bigint, - ws_ship_date_sk bigint, - ws_bill_customer_sk bigint, - ws_bill_cdemo_sk bigint, - ws_bill_hdemo_sk bigint, - ws_bill_addr_sk bigint, - ws_ship_customer_sk bigint, - ws_ship_cdemo_sk bigint, - ws_ship_hdemo_sk bigint, - ws_ship_addr_sk bigint, - ws_web_page_sk bigint, - ws_web_site_sk bigint, - ws_ship_mode_sk bigint, - ws_warehouse_sk bigint, - ws_promo_sk bigint, - ws_quantity integer, - ws_wholesale_cost decimalv3(7,2), - ws_list_price decimalv3(7,2), - ws_sales_price decimalv3(7,2), - ws_ext_discount_amt decimalv3(7,2), - ws_ext_sales_price decimalv3(7,2), - ws_ext_wholesale_cost decimalv3(7,2), - ws_ext_list_price decimalv3(7,2), - ws_ext_tax decimalv3(7,2), - ws_coupon_amt decimalv3(7,2), - ws_ext_ship_cost decimalv3(7,2), - ws_net_paid decimalv3(7,2), - ws_net_paid_inc_tax decimalv3(7,2), - ws_net_paid_inc_ship decimalv3(7,2), - ws_net_paid_inc_ship_tax decimalv3(7,2), - ws_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ws_sold_date_sk, ws_item_sk) - DISTRIBUTED BY HASH(ws_item_sk, ws_order_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "web" - ) - ''' - - sql ''' - drop table if exists store - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store ( - s_store_sk bigint not null, - s_store_id char(16) not null, - s_rec_start_date datev2, - s_rec_end_date datev2, - s_closed_date_sk bigint, - s_store_name varchar(50), - s_number_employees integer, - s_floor_space integer, - s_hours char(20), - s_manager varchar(40), - s_market_id integer, - s_geography_class varchar(100), - s_market_desc varchar(100), - s_market_manager varchar(40), - s_division_id integer, - s_division_name varchar(50), - s_company_id integer, - s_company_name varchar(50), - s_street_number varchar(10), - s_street_name varchar(60), - s_street_type char(15), - 
s_suite_number char(10), - s_city varchar(60), - s_county varchar(30), - s_state char(2), - s_zip char(10), - s_country varchar(20), - s_gmt_offset decimalv3(5,2), - s_tax_precentage decimalv3(5,2) - ) - DUPLICATE KEY(s_store_sk) - DISTRIBUTED BY HASH(s_store_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists time_dim - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS time_dim ( - t_time_sk bigint not null, - t_time_id char(16) not null, - t_time integer, - t_hour integer, - t_minute integer, - t_second integer, - t_am_pm char(2), - t_shift char(20), - t_sub_shift char(20), - t_meal_time char(20) - ) - DUPLICATE KEY(t_time_sk) - DISTRIBUTED BY HASH(t_time_sk) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists web_page - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS web_page ( - wp_web_page_sk bigint not null, - wp_web_page_id char(16) not null, - wp_rec_start_date datev2, - wp_rec_end_date datev2, - wp_creation_date_sk bigint, - wp_access_date_sk bigint, - wp_autogen_flag char(1), - wp_customer_sk bigint, - wp_url varchar(100), - wp_type char(50), - wp_char_count integer, - wp_link_count integer, - wp_image_count integer, - wp_max_ad_count integer - ) - DUPLICATE KEY(wp_web_page_sk) - DISTRIBUTED BY HASH(wp_web_page_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists store_returns - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_returns ( - sr_item_sk bigint not null, - sr_ticket_number bigint not null, - sr_returned_date_sk bigint, - sr_return_time_sk bigint, - sr_customer_sk bigint, - sr_cdemo_sk bigint, - sr_hdemo_sk bigint, - sr_addr_sk bigint, - sr_store_sk bigint, - sr_reason_sk bigint, - sr_return_quantity integer, - sr_return_amt decimalv3(7,2), - sr_return_tax decimalv3(7,2), - sr_return_amt_inc_tax decimalv3(7,2), - sr_fee decimalv3(7,2), - sr_return_ship_cost decimalv3(7,2), - sr_refunded_cash decimalv3(7,2), - 
sr_reversed_charge decimalv3(7,2), - sr_store_credit decimalv3(7,2), - sr_net_loss decimalv3(7,2) - ) - duplicate key(sr_item_sk, sr_ticket_number) - distributed by hash (sr_item_sk, sr_ticket_number) buckets 32 - properties ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists store_sales - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS store_sales ( - ss_sold_date_sk bigint, - ss_item_sk bigint not null, - ss_ticket_number bigint not null, - ss_sold_time_sk bigint, - ss_customer_sk bigint, - ss_cdemo_sk bigint, - ss_hdemo_sk bigint, - ss_addr_sk bigint, - ss_store_sk bigint, - ss_promo_sk bigint, - ss_quantity integer, - ss_wholesale_cost decimalv3(7,2), - ss_list_price decimalv3(7,2), - ss_sales_price decimalv3(7,2), - ss_ext_discount_amt decimalv3(7,2), - ss_ext_sales_price decimalv3(7,2), - ss_ext_wholesale_cost decimalv3(7,2), - ss_ext_list_price decimalv3(7,2), - ss_ext_tax decimalv3(7,2), - ss_coupon_amt decimalv3(7,2), - ss_net_paid decimalv3(7,2), - ss_net_paid_inc_tax decimalv3(7,2), - ss_net_profit decimalv3(7,2) - ) - DUPLICATE KEY(ss_sold_date_sk, ss_item_sk) - DISTRIBUTED BY HASH(ss_item_sk, ss_ticket_number) BUCKETS 32 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "store" - ) - ''' - - sql ''' - drop table if exists ship_mode - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS ship_mode ( - sm_ship_mode_sk bigint not null, - sm_ship_mode_id char(16) not null, - sm_type char(30), - sm_code char(10), - sm_carrier char(20), - sm_contract char(20) - ) - DUPLICATE KEY(sm_ship_mode_sk) - DISTRIBUTED BY HASH(sm_ship_mode_sk) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists customer - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS customer ( - c_customer_sk bigint not null, - c_customer_id char(16) not null, - c_current_cdemo_sk bigint, - c_current_hdemo_sk bigint, - c_current_addr_sk bigint, - c_first_shipto_date_sk bigint, - c_first_sales_date_sk bigint, 
- c_salutation char(10), - c_first_name char(20), - c_last_name char(30), - c_preferred_cust_flag char(1), - c_birth_day integer, - c_birth_month integer, - c_birth_year integer, - c_birth_country varchar(20), - c_login char(13), - c_email_address char(50), - c_last_review_date_sk bigint - ) - DUPLICATE KEY(c_customer_sk) - DISTRIBUTED BY HASH(c_customer_id) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - drop table if exists dbgen_version - ''' - - sql ''' - CREATE TABLE IF NOT EXISTS dbgen_version - ( - dv_version varchar(16) , - dv_create_date datev2 , - dv_create_time datetime , - dv_cmdline_args varchar(200) - ) - DUPLICATE KEY(dv_version) - DISTRIBUTED BY HASH(dv_version) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ) - ''' - - sql ''' - alter table customer add constraint customer_pk primary key (c_customer_sk); - ''' - - sql ''' - alter table customer add constraint customer_uk unique (c_customer_id); - ''' - - sql ''' - alter table store_sales add constraint ss_fk foreign key(ss_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table web_sales add constraint ws_fk foreign key(ws_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql ''' - alter table catalog_sales add constraint cs_fk foreign key(cs_bill_customer_sk) references customer(c_customer_sk); - ''' - - sql """ - alter table customer_demographics modify column cd_dep_employed_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_day_name set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='Friday', 'max_value'='Wednesday', 'data_size'='521779') - """ - - sql """ - alter table date_dim modify column d_following_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify 
column d_same_day_ly set stats ('row_count'='73049', 'ndv'='72450', 'num_nulls'='0', 'min_value'='2414657', 'max_value'='2487705', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_city set stats ('row_count'='20', 'ndv'='12', 'num_nulls'='0', 'min_value'='Fairview', 'max_value'='Shiloh', 'data_size'='183') - """ - - sql """ - alter table warehouse modify column w_street_type set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='71') - """ - - sql """ - alter table catalog_sales modify column cs_call_center_sk set stats ('row_count'='1439980416', 'ndv'='42', 'num_nulls'='7199711', 'min_value'='1', 'max_value'='42', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship set stats ('row_count'='1439980416', 'ndv'='2505826', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='43956.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_sales_price set stats ('row_count'='1439980416', 'ndv'='29306', 'num_nulls'='7200276', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_class set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='large', 'max_value'='small', 'data_size'='226') - """ - - sql """ - alter table call_center modify column cc_country set stats ('row_count'='42', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='546') - """ - - sql """ - alter table call_center modify column cc_county set stats ('row_count'='42', 'ndv'='16', 'num_nulls'='0', 'min_value'='Barrow County', 'max_value'='Williamson County', 'data_size'='627') - """ - - sql """ - alter table call_center modify column cc_mkt_class set stats ('row_count'='42', 'ndv'='36', 'num_nulls'='0', 'min_value'='A bit narrow forms matter animals. 
Consist', 'max_value'='Yesterday new men can make moreov', 'data_size'='1465') - """ - - sql """ - alter table call_center modify column cc_sq_ft set stats ('row_count'='42', 'ndv'='31', 'num_nulls'='0', 'min_value'='-1890660328', 'max_value'='2122480316', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_state set stats ('row_count'='42', 'ndv'='14', 'num_nulls'='0', 'min_value'='FL', 'max_value'='WV', 'data_size'='84') - """ - - sql """ - alter table inventory modify column inv_warehouse_sk set stats ('row_count'='783000000', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2881609', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cash set stats ('row_count'='143996756', 'ndv'='1107525', 'num_nulls'='2879192', 'min_value'='0.00', 'max_value'='26955.24', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2881314', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amt_inc_tax set stats ('row_count'='143996756', 'ndv'='1544502', 'num_nulls'='2881886', 'min_value'='0.00', 'max_value'='30418.06', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_addr_sk set stats ('row_count'='143996756', 'ndv'='6015811', 'num_nulls'='2883215', 'min_value'='1', 'max_value'='6000000', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_buy_potential set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='0-500', 'max_value'='Unknown', 'data_size'='54000') - """ - 
- sql """ - alter table customer_address modify column ca_address_id set stats ('row_count'='6000000', 'ndv'='5984931', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPEAA', 'data_size'='96000000') - """ - - sql """ - alter table customer_address modify column ca_address_sk set stats ('row_count'='6000000', 'ndv'='6015811', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000', 'data_size'='48000000') - """ - - sql """ - alter table customer_address modify column ca_country set stats ('row_count'='6000000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='75661794') - """ - - sql """ - alter table customer_address modify column ca_location_type set stats ('row_count'='6000000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='single family', 'data_size'='52372545') - """ - - sql """ - alter table customer_address modify column ca_street_number set stats ('row_count'='6000000', 'ndv'='1002', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='16837336') - """ - - sql """ - alter table customer_address modify column ca_suite_number set stats ('row_count'='6000000', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='45911575') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_id set stats ('row_count'='30000', 'ndv'='29953', 'num_nulls'='0', 'min_value'='AAAAAAAAAAABAAAA', 'max_value'='AAAAAAAAPPPGAAAA', 'data_size'='480000') - """ - - sql """ - alter table item modify column i_rec_end_date set stats ('row_count'='300000', 'ndv'='3', 'num_nulls'='150000', 'min_value'='1999-10-27', 'max_value'='2001-10-26', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239971', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reversed_charge set stats 
('row_count'='71997522', 'ndv'='692680', 'num_nulls'='3239546', 'min_value'='0.00', 'max_value'='23194.77', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_state set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='0', 'min_value'='AL', 'max_value'='WV', 'data_size'='108') - """ - - sql """ - alter table promotion modify column p_end_date_sk set stats ('row_count'='1500', 'ndv'='683', 'num_nulls'='18', 'min_value'='2450113', 'max_value'='2450967', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_bill_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='180139', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_ship_cost set stats ('row_count'='720000376', 'ndv'='567477', 'num_nulls'='180084', 'min_value'='0.00', 'max_value'='14950.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_addr_sk set stats ('row_count'='720000376', 'ndv'='6015811', 'num_nulls'='179848', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_mode_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180017', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_warehouse_sk set stats ('row_count'='720000376', 'ndv'='20', 'num_nulls'='180105', 'min_value'='1', 'max_value'='20', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_company_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_gmt_offset set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='6', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_manager set stats 
('row_count'='1002', 'ndv'='739', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Clifton', 'data_size'='12649') - """ - - sql """ - alter table store modify column s_street_number set stats ('row_count'='1002', 'ndv'='521', 'num_nulls'='0', 'min_value'='', 'max_value'='999', 'data_size'='2874') - """ - - sql """ - alter table time_dim modify column t_meal_time set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='lunch', 'data_size'='248400') - """ - - sql """ - alter table time_dim modify column t_time set stats ('row_count'='86400', 'ndv'='86684', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_creation_date_sk set stats ('row_count'='3000', 'ndv'='199', 'num_nulls'='33', 'min_value'='2450604', 'max_value'='2450815', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_customer_sk set stats ('row_count'='3000', 'ndv'='713', 'num_nulls'='2147', 'min_value'='9522', 'max_value'='11995685', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_max_ad_count set stats ('row_count'='3000', 'ndv'='5', 'num_nulls'='31', 'min_value'='0', 'max_value'='4', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_url set stats ('row_count'='3000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='http://www.foo.com', 'data_size'='53406') - """ - - sql """ - alter table store_returns modify column sr_refunded_cash set stats ('row_count'='287999764', 'ndv'='928470', 'num_nulls'='10081294', 'min_value'='0.00', 'max_value'='18173.96', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_tax set stats ('row_count'='287999764', 'ndv'='117247', 'num_nulls'='10081332', 'min_value'='0.00', 'max_value'='1682.04', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_customer_sk set stats 
('row_count'='2879987999', 'ndv'='12157481', 'num_nulls'='129590766', 'min_value'='1', 'max_value'='12000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_hdemo_sk set stats ('row_count'='2879987999', 'ndv'='7251', 'num_nulls'='129594559', 'min_value'='1', 'max_value'='7200', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_store_sk set stats ('row_count'='2879987999', 'ndv'='499', 'num_nulls'='129572050', 'min_value'='1', 'max_value'='1000', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table ship_mode modify column sm_ship_mode_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table customer modify column c_first_name set stats ('row_count'='12000000', 'ndv'='5140', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma', 'data_size'='67593278') - """ - - sql """ - alter table customer modify column c_first_sales_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='419856', 'min_value'='2448998', 'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_first_shipto_date_sk set stats ('row_count'='12000000', 'ndv'='3644', 'num_nulls'='420769', 'min_value'='2449028', 'max_value'='2452678', 'data_size'='96000000') - """ - - sql """ - alter table customer_demographics modify column cd_dep_college_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table date_dim modify column d_dow set stats ('row_count'='73049', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='292196') - """ - - sql """ - alter 
table date_dim modify column d_fy_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_qoy set stats ('row_count'='73049', 'ndv'='4', 'num_nulls'='0', 'min_value'='1', 'max_value'='4', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_quarter_seq set stats ('row_count'='73049', 'ndv'='801', 'num_nulls'='0', 'min_value'='1', 'max_value'='801', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_street_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Elm', 'data_size'='176') - """ - - sql """ - alter table warehouse modify column w_suite_number set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite X', 'data_size'='150') - """ - - sql """ - alter table catalog_sales modify column cs_bill_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7202134', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7198837', 'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ext_ship_cost set stats ('row_count'='1439980416', 'ndv'='573238', 'num_nulls'='7202537', 'min_value'='0.00', 'max_value'='14994.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='California', 'max_value'='Pacific Northwest_2', 'data_size'='572') - """ - - sql """ - alter table call_center modify column cc_street_name set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='1st', 'max_value'='Willow', 'data_size'='356') - """ - - sql """ - alter table 
call_center modify column cc_zip set stats ('row_count'='42', 'ndv'='19', 'num_nulls'='0', 'min_value'='18605', 'max_value'='98048', 'data_size'='210') - """ - - sql """ - alter table inventory modify column inv_quantity_on_hand set stats ('row_count'='783000000', 'ndv'='1006', 'num_nulls'='39153758', 'min_value'='0', 'max_value'='1000', 'data_size'='3132000000') - """ - - sql """ - alter table catalog_returns modify column cr_catalog_page_sk set stats ('row_count'='143996756', 'ndv'='17005', 'num_nulls'='2882502', 'min_value'='1', 'max_value'='25207', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_income_band_sk set stats ('row_count'='7200', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='57600') - """ - - sql """ - alter table catalog_page modify column cp_description set stats ('row_count'='30000', 'ndv'='30141', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters worry both workers. Fascinating characters take cheap never alive studies. 
Direct, old', 'data_size'='2215634') - """ - - sql """ - alter table item modify column i_item_id set stats ('row_count'='300000', 'ndv'='150851', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPBAAA', 'data_size'='4800000') - """ - - sql """ - alter table web_returns modify column wr_account_credit set stats ('row_count'='71997522', 'ndv'='683955', 'num_nulls'='3241972', 'min_value'='0.00', 'max_value'='23166.33', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_net_loss set stats ('row_count'='71997522', 'ndv'='815608', 'num_nulls'='3240573', 'min_value'='0.50', 'max_value'='15887.84', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt set stats ('row_count'='71997522', 'ndv'='808311', 'num_nulls'='3238405', 'min_value'='0.00', 'max_value'='29191.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_amt_inc_tax set stats ('row_count'='71997522', 'ndv'='1359913', 'num_nulls'='3239765', 'min_value'='0.00', 'max_value'='30393.01', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_return_quantity set stats ('row_count'='71997522', 'ndv'='100', 'num_nulls'='3238643', 'min_value'='1', 'max_value'='100', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_addr_sk set stats ('row_count'='71997522', 'ndv'='6015811', 'num_nulls'='3239658', 'min_value'='1', 'max_value'='6000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_customer_sk set stats ('row_count'='71997522', 'ndv'='12119220', 'num_nulls'='3237281', 'min_value'='1', 'max_value'='12000000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_mkt_desc set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Acres see else children. Mutual too', 'max_value'='Windows increase to a differences. 
Other parties might in', 'data_size'='3473') - """ - - sql """ - alter table web_site modify column web_mkt_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='1', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_rec_end_date set stats ('row_count'='54', 'ndv'='3', 'num_nulls'='27', 'min_value'='1999-08-16', 'max_value'='2001-08-15', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_site_id set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='864') - """ - - sql """ - alter table web_site modify column web_street_type set stats ('row_count'='54', 'ndv'='20', 'num_nulls'='0', 'min_value'='Ave', 'max_value'='Wy', 'data_size'='208') - """ - - sql """ - alter table promotion modify column p_channel_demo set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_channel_details set stats ('row_count'='1500', 'ndv'='1490', 'num_nulls'='0', 'min_value'='', 'max_value'='Young, valuable companies watch walls. 
Payments can flour', 'data_size'='59126') - """ - - sql """ - alter table promotion modify column p_channel_event set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_discount_active set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1473') - """ - - sql """ - alter table promotion modify column p_promo_sk set stats ('row_count'='1500', 'ndv'='1489', 'num_nulls'='0', 'min_value'='1', 'max_value'='1500', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_purpose set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='10374') - """ - - sql """ - alter table web_sales modify column ws_bill_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='179788', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_date_sk set stats ('row_count'='720000376', 'ndv'='1820', 'num_nulls'='179921', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_site_sk set stats ('row_count'='720000376', 'ndv'='54', 'num_nulls'='179930', 'min_value'='1', 'max_value'='54', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_city set stats ('row_count'='1002', 'ndv'='55', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='9238') - """ - - sql """ - alter table store modify column s_company_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='7', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_county set stats ('row_count'='1002', 'ndv'='28', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='14291') - """ - - sql """ - alter table store 
modify column s_geography_class set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6972') - """ - - sql """ - alter table store modify column s_hours set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='8AM-8AM', 'data_size'='7088') - """ - - sql """ - alter table store modify column s_store_id set stats ('row_count'='1002', 'ndv'='501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPBAAAAA', 'data_size'='16032') - """ - - sql """ - alter table store modify column s_zip set stats ('row_count'='1002', 'ndv'='354', 'num_nulls'='0', 'min_value'='', 'max_value'='99454', 'data_size'='4975') - """ - - sql """ - alter table time_dim modify column t_am_pm set stats ('row_count'='86400', 'ndv'='2', 'num_nulls'='0', 'min_value'='AM', 'max_value'='PM', 'data_size'='172800') - """ - - sql """ - alter table time_dim modify column t_minute set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 'data_size'='345600') - """ - - sql """ - alter table web_page modify column wp_web_page_id set stats ('row_count'='3000', 'ndv'='1501', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPKAAAAA', 'data_size'='48000') - """ - - sql """ - alter table web_page modify column wp_web_page_sk set stats ('row_count'='3000', 'ndv'='2984', 'num_nulls'='0', 'min_value'='1', 'max_value'='3000', 'data_size'='24000') - """ - - sql """ - alter table store_returns modify column sr_return_amt set stats ('row_count'='287999764', 'ndv'='671228', 'num_nulls'='10080055', 'min_value'='0.00', 'max_value'='19434.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_returned_date_sk set stats ('row_count'='287999764', 'ndv'='2010', 'num_nulls'='10079607', 'min_value'='2450820', 'max_value'='2452822', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_tax 
set stats ('row_count'='2879987999', 'ndv'='149597', 'num_nulls'='129588732', 'min_value'='0.00', 'max_value'='1797.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_current_cdemo_sk set stats ('row_count'='12000000', 'ndv'='1913901', 'num_nulls'='419895', 'min_value'='1', 'max_value'='1920800', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_customer_id set stats ('row_count'='12000000', 'ndv'='11921032', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAABAA', 'max_value'='AAAAAAAAPPPPPKAA', 'data_size'='192000000') - """ - - sql """ - alter table date_dim modify column d_current_day set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_current_month set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date set stats ('row_count'='73049', 'ndv'='73250', 'num_nulls'='0', 'min_value'='1900-01-02', 'max_value'='2100-01-01', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_moy set stats ('row_count'='73049', 'ndv'='12', 'num_nulls'='0', 'min_value'='1', 'max_value'='12', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column w_gmt_offset set stats ('row_count'='20', 'ndv'='3', 'num_nulls'='1', 'min_value'='-7.00', 'max_value'='-5.00', 'data_size'='80') - """ - - sql """ - alter table warehouse modify column w_warehouse_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table warehouse modify column w_warehouse_sq_ft set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='1', 'min_value'='73065', 'max_value'='977787', 'data_size'='80') - """ - - sql """ - alter table catalog_sales modify column cs_ext_sales_price set stats 
('row_count'='1439980416', 'ndv'='1100662', 'num_nulls'='7199625', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='393180', 'num_nulls'='7199876', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_item_sk set stats ('row_count'='1439980416', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_tax set stats ('row_count'='1439980416', 'ndv'='2422238', 'num_nulls'='7200702', 'min_value'='0.00', 'max_value'='32376.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_date_sk set stats ('row_count'='1439980416', 'ndv'='1933', 'num_nulls'='7200707', 'min_value'='2450817', 'max_value'='2452744', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_warehouse_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7200688', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_division set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_division_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='164') - """ - - sql """ - alter table call_center modify column cc_manager set stats ('row_count'='42', 'ndv'='28', 'num_nulls'='0', 'min_value'='Alden Snyder', 'max_value'='Wayne Ray', 'data_size'='519') - """ - - sql """ - alter table call_center modify column cc_rec_start_date set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='1998-01-01', 'max_value'='2002-01-01', 
'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_call_center_sk set stats ('row_count'='143996756', 'ndv'='42', 'num_nulls'='2881668', 'min_value'='1', 'max_value'='42', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_net_loss set stats ('row_count'='143996756', 'ndv'='911034', 'num_nulls'='2881704', 'min_value'='0.50', 'max_value'='16095.08', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_customer_sk set stats ('row_count'='143996756', 'ndv'='12156363', 'num_nulls'='2879017', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_refunded_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882107', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_customer_sk set stats ('row_count'='143996756', 'ndv'='12157481', 'num_nulls'='2879023', 'min_value'='1', 'max_value'='12000000', 'data_size'='1151974048') - """ - - sql """ - alter table customer_address modify column ca_gmt_offset set stats ('row_count'='6000000', 'ndv'='6', 'num_nulls'='180219', 'min_value'='-10.00', 'max_value'='-5.00', 'data_size'='24000000') - """ - - sql """ - alter table item modify column i_color set stats ('row_count'='300000', 'ndv'='93', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow', 'data_size'='1610293') - """ - - sql """ - alter table item modify column i_manufact set stats ('row_count'='300000', 'ndv'='1004', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripri', 'data_size'='3379693') - """ - - sql """ - alter table item modify column i_product_name set stats ('row_count'='300000', 'ndv'='294994', 'num_nulls'='0', 'min_value'='', 'max_value'='pripripripripriought', 'data_size'='6849199') - """ - - sql """ - alter table web_returns modify column wr_returned_time_sk 
set stats ('row_count'='71997522', 'ndv'='87677', 'num_nulls'='3238574', 'min_value'='0', 'max_value'='86399', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_manager set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='William Young', 'data_size'='658') - """ - - sql """ - alter table web_site modify column web_mkt_class set stats ('row_count'='54', 'ndv'='40', 'num_nulls'='0', 'min_value'='', 'max_value'='Written, political plans show to the models. T', 'data_size'='1822') - """ - - sql """ - alter table web_site modify column web_rec_start_date set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='2', 'min_value'='1997-08-16', 'max_value'='2001-08-16', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_street_number set stats ('row_count'='54', 'ndv'='36', 'num_nulls'='0', 'min_value'='', 'max_value'='983', 'data_size'='154') - """ - - sql """ - alter table promotion modify column p_channel_catalog set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1482') - """ - - sql """ - alter table promotion modify column p_promo_id set stats ('row_count'='1500', 'ndv'='1519', 'num_nulls'='0', 'min_value'='AAAAAAAAAABAAAAA', 'max_value'='AAAAAAAAPPEAAAAA', 'data_size'='24000') - """ - - sql """ - alter table web_sales modify column ws_bill_customer_sk set stats ('row_count'='720000376', 'ndv'='12103729', 'num_nulls'='179817', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_list_price set stats ('row_count'='720000376', 'ndv'='29396', 'num_nulls'='180053', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_sales_price set stats ('row_count'='720000376', 'ndv'='29288', 'num_nulls'='180005', 'min_value'='0.00', 'max_value'='300.00', 'data_size'='2880001504') - """ - - sql """ - 
alter table web_sales modify column ws_ship_hdemo_sk set stats ('row_count'='720000376', 'ndv'='7251', 'num_nulls'='179824', 'min_value'='1', 'max_value'='7200', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_closed_date_sk set stats ('row_count'='1002', 'ndv'='163', 'num_nulls'='729', 'min_value'='2450820', 'max_value'='2451313', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_division_id set stats ('row_count'='1002', 'ndv'='1', 'num_nulls'='6', 'min_value'='1', 'max_value'='1', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_market_desc set stats ('row_count'='1002', 'ndv'='765', 'num_nulls'='0', 'min_value'='', 'max_value'='Yesterday left factors handle continuing co', 'data_size'='57638') - """ - - sql """ - alter table store modify column s_market_id set stats ('row_count'='1002', 'ndv'='10', 'num_nulls'='8', 'min_value'='1', 'max_value'='10', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_state set stats ('row_count'='1002', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='WV', 'data_size'='1994') - """ - - sql """ - alter table store modify column s_store_sk set stats ('row_count'='1002', 'ndv'='988', 'num_nulls'='0', 'min_value'='1', 'max_value'='1002', 'data_size'='8016') - """ - - sql """ - alter table store modify column s_street_name set stats ('row_count'='1002', 'ndv'='549', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Oak', 'data_size'='8580') - """ - - sql """ - alter table web_page modify column wp_access_date_sk set stats ('row_count'='3000', 'ndv'='101', 'num_nulls'='31', 'min_value'='2452548', 'max_value'='2452648', 'data_size'='24000') - """ - - sql """ - alter table web_page modify column wp_char_count set stats ('row_count'='3000', 'ndv'='1883', 'num_nulls'='42', 'min_value'='303', 'max_value'='8523', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_addr_sk set stats 
('row_count'='287999764', 'ndv'='6015811', 'num_nulls'='10082311', 'min_value'='1', 'max_value'='6000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_return_time_sk set stats ('row_count'='287999764', 'ndv'='32660', 'num_nulls'='10082805', 'min_value'='28799', 'max_value'='61199', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_sk set stats ('row_count'='287999764', 'ndv'='499', 'num_nulls'='10081871', 'min_value'='1', 'max_value'='1000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_coupon_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sales_price set stats ('row_count'='2879987999', 'ndv'='19780', 'num_nulls'='129598061', 'min_value'='0.00', 'max_value'='200.00', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_country set stats ('row_count'='12000000', 'ndv'='211', 'num_nulls'='0', 'min_value'='', 'max_value'='ZIMBABWE', 'data_size'='100750845') - """ - - sql """ - alter table customer modify column c_birth_month set stats ('row_count'='12000000', 'ndv'='12', 'num_nulls'='419629', 'min_value'='1', 'max_value'='12', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_customer_sk set stats ('row_count'='12000000', 'ndv'='12157481', 'num_nulls'='0', 'min_value'='1', 'max_value'='12000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_email_address set stats ('row_count'='12000000', 'ndv'='11642077', 'num_nulls'='0', 'min_value'='', 'max_value'='Zulma.Young@aDhzZzCzYN.edu', 'data_size'='318077849') - """ - - sql """ - alter table customer modify column c_last_review_date_sk set stats ('row_count'='12000000', 'ndv'='366', 'num_nulls'='419900', 'min_value'='2452283', 
'max_value'='2452648', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_preferred_cust_flag set stats ('row_count'='12000000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='11580510') - """ - - sql """ - alter table dbgen_version modify column dv_version set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='3.2.0', 'max_value'='3.2.0', 'data_size'='5') - """ - - sql """ - alter table customer_demographics modify column cd_purchase_estimate set stats ('row_count'='1920800', 'ndv'='20', 'num_nulls'='0', 'min_value'='500', 'max_value'='10000', 'data_size'='7683200') - """ - - sql """ - alter table reason modify column r_reason_id set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPDAAAAAA', 'data_size'='1040') - """ - - sql """ - alter table reason modify column r_reason_sk set stats ('row_count'='65', 'ndv'='65', 'num_nulls'='0', 'min_value'='1', 'max_value'='65', 'data_size'='520') - """ - - sql """ - alter table date_dim modify column d_current_week set stats ('row_count'='73049', 'ndv'='1', 'num_nulls'='0', 'min_value'='N', 'max_value'='N', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_first_dom set stats ('row_count'='73049', 'ndv'='2410', 'num_nulls'='0', 'min_value'='2415021', 'max_value'='2488070', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_fy_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_last_dom set stats ('row_count'='73049', 'ndv'='2419', 'num_nulls'='0', 'min_value'='2415020', 'max_value'='2488372', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_month_seq set stats ('row_count'='73049', 'ndv'='2398', 'num_nulls'='0', 'min_value'='0', 'max_value'='2400', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_quarter_name set stats ('row_count'='73049', 'ndv'='799', 'num_nulls'='0', 'min_value'='1900Q1', 'max_value'='2100Q1', 'data_size'='438294') - """ - - sql """ - alter table warehouse modify column w_county set stats ('row_count'='20', 'ndv'='14', 'num_nulls'='0', 'min_value'='Bronx County', 'max_value'='Ziebach County', 'data_size'='291') - """ - - sql """ - alter table warehouse modify column w_street_number set stats ('row_count'='20', 'ndv'='19', 'num_nulls'='0', 'min_value'='', 'max_value'='957', 'data_size'='54') - """ - - sql """ - alter table warehouse modify column w_warehouse_name set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='', 'max_value'='Therefore urg', 'data_size'='307') - """ - - sql """ - alter table catalog_sales modify column cs_ext_discount_amt set stats ('row_count'='1439980416', 'ndv'='1100115', 'num_nulls'='7201054', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid_inc_ship_tax set stats ('row_count'='1439980416', 'ndv'='3312360', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46593.36', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_promo_sk set stats ('row_count'='1439980416', 'ndv'='1489', 'num_nulls'='7202844', 'min_value'='1', 'max_value'='1500', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_id set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPBAAAAAA', 'data_size'='672') - """ - - sql """ - alter table call_center modify column cc_employees set stats ('row_count'='42', 'ndv'='30', 'num_nulls'='0', 'min_value'='69020', 'max_value'='6879074', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_suite_number set stats ('row_count'='42', 'ndv'='18', 'num_nulls'='0', 'min_value'='Suite 0', 
'max_value'='Suite W', 'data_size'='326') - """ - - sql """ - alter table catalog_returns modify column cr_item_sk set stats ('row_count'='143996756', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reason_sk set stats ('row_count'='143996756', 'ndv'='65', 'num_nulls'='2881950', 'min_value'='1', 'max_value'='65', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_ship_cost set stats ('row_count'='143996756', 'ndv'='483467', 'num_nulls'='2883436', 'min_value'='0.00', 'max_value'='14273.28', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_ship_mode_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2879879', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_store_credit set stats ('row_count'='143996756', 'ndv'='802237', 'num_nulls'='2880469', 'min_value'='0.00', 'max_value'='23215.15', 'data_size'='575987024') - """ - - sql """ - alter table customer_address modify column ca_city set stats ('row_count'='6000000', 'ndv'='977', 'num_nulls'='0', 'min_value'='', 'max_value'='Zion', 'data_size'='52096290') - """ - - sql """ - alter table customer_address modify column ca_state set stats ('row_count'='6000000', 'ndv'='52', 'num_nulls'='0', 'min_value'='', 'max_value'='WY', 'data_size'='11640128') - """ - - sql """ - alter table customer_address modify column ca_street_name set stats ('row_count'='6000000', 'ndv'='8173', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodland Woodland', 'data_size'='50697257') - """ - - sql """ - alter table customer_address modify column ca_street_type set stats ('row_count'='6000000', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='24441630') - """ - - sql """ - alter table catalog_page modify column cp_catalog_number set 
stats ('row_count'='30000', 'ndv'='109', 'num_nulls'='297', 'min_value'='1', 'max_value'='109', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_number set stats ('row_count'='30000', 'ndv'='279', 'num_nulls'='294', 'min_value'='1', 'max_value'='277', 'data_size'='120000') - """ - - sql """ - alter table catalog_page modify column cp_catalog_page_sk set stats ('row_count'='30000', 'ndv'='30439', 'num_nulls'='0', 'min_value'='1', 'max_value'='30000', 'data_size'='240000') - """ - - sql """ - alter table catalog_page modify column cp_start_date_sk set stats ('row_count'='30000', 'ndv'='91', 'num_nulls'='286', 'min_value'='2450815', 'max_value'='2453005', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_rec_start_date set stats ('row_count'='300000', 'ndv'='4', 'num_nulls'='784', 'min_value'='1997-10-27', 'max_value'='2001-10-27', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_units set stats ('row_count'='300000', 'ndv'='22', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='1253652') - """ - - sql """ - alter table web_returns modify column wr_refunded_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238545', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_ship_cost set stats ('row_count'='71997522', 'ndv'='451263', 'num_nulls'='3239048', 'min_value'='0.00', 'max_value'='14352.10', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returned_date_sk set stats ('row_count'='71997522', 'ndv'='2188', 'num_nulls'='3239259', 'min_value'='2450819', 'max_value'='2453002', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_returning_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3239192', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - 
""" - - sql """ - alter table web_site modify column web_suite_number set stats ('row_count'='54', 'ndv'='38', 'num_nulls'='0', 'min_value'='Suite 100', 'max_value'='Suite Y', 'data_size'='430') - """ - - sql """ - alter table promotion modify column p_start_date_sk set stats ('row_count'='1500', 'ndv'='685', 'num_nulls'='23', 'min_value'='2450096', 'max_value'='2450915', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_coupon_amt set stats ('row_count'='720000376', 'ndv'='1505315', 'num_nulls'='179933', 'min_value'='0.00', 'max_value'='28824.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_wholesale_cost set stats ('row_count'='720000376', 'ndv'='393180', 'num_nulls'='180060', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship set stats ('row_count'='720000376', 'ndv'='2414838', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='44263.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_date_sk set stats ('row_count'='720000376', 'ndv'='1952', 'num_nulls'='180011', 'min_value'='2450817', 'max_value'='2452762', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_web_page_sk set stats ('row_count'='720000376', 'ndv'='2984', 'num_nulls'='179732', 'min_value'='1', 'max_value'='3000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_country set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='12961') - """ - - sql """ - alter table store modify column s_store_name set stats ('row_count'='1002', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='3916') - """ - - sql """ - alter table time_dim modify column t_second set stats ('row_count'='86400', 'ndv'='60', 'num_nulls'='0', 'min_value'='0', 'max_value'='59', 
'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_sub_shift set stats ('row_count'='86400', 'ndv'='4', 'num_nulls'='0', 'min_value'='afternoon', 'max_value'='night', 'data_size'='597600') - """ - - sql """ - alter table web_page modify column wp_image_count set stats ('row_count'='3000', 'ndv'='7', 'num_nulls'='26', 'min_value'='1', 'max_value'='7', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_type set stats ('row_count'='3000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='welcome', 'data_size'='18867') - """ - - sql """ - alter table store_returns modify column sr_customer_sk set stats ('row_count'='287999764', 'ndv'='12157481', 'num_nulls'='10081624', 'min_value'='1', 'max_value'='12000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_hdemo_sk set stats ('row_count'='287999764', 'ndv'='7251', 'num_nulls'='10083275', 'min_value'='1', 'max_value'='7200', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_addr_sk set stats ('row_count'='2879987999', 'ndv'='6015811', 'num_nulls'='129589799', 'min_value'='1', 'max_value'='6000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_item_sk set stats ('row_count'='2879987999', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_quantity set stats ('row_count'='2879987999', 'ndv'='100', 'num_nulls'='129584258', 'min_value'='1', 'max_value'='100', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ticket_number set stats ('row_count'='2879987999', 'ndv'='238830448', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='9905', 
'num_nulls'='129590273', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='11519951996') - """ - - sql """ - alter table ship_mode modify column sm_type set stats ('row_count'='20', 'ndv'='6', 'num_nulls'='0', 'min_value'='EXPRESS', 'max_value'='TWO DAY', 'data_size'='150') - """ - - sql """ - alter table customer modify column c_current_addr_sk set stats ('row_count'='12000000', 'ndv'='5243359', 'num_nulls'='0', 'min_value'='3', 'max_value'='6000000', 'data_size'='96000000') - """ - - sql """ - alter table customer modify column c_last_name set stats ('row_count'='12000000', 'ndv'='4990', 'num_nulls'='0', 'min_value'='', 'max_value'='Zuniga', 'data_size'='70991730') - """ - - sql """ - alter table dbgen_version modify column dv_cmdline_args set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'max_value'='-SCALE 1000 -PARALLEL 64 -CHILD 1 -TERMINATE N -DIR /mnt/datadisk0/tpcds1t/tpcds-data', 'data_size'='86') - """ - - sql """ - alter table date_dim modify column d_current_quarter set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_date_sk set stats ('row_count'='73049', 'ndv'='73042', 'num_nulls'='0', 'min_value'='2415022', 'max_value'='2488070', 'data_size'='584392') - """ - - sql """ - alter table date_dim modify column d_holiday set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_country set stats ('row_count'='20', 'ndv'='1', 'num_nulls'='0', 'min_value'='United States', 'max_value'='United States', 'data_size'='260') - """ - - sql """ - alter table warehouse modify column w_state set stats ('row_count'='20', 'ndv'='13', 'num_nulls'='0', 'min_value'='AL', 'max_value'='TN', 'data_size'='40') - """ - - sql """ - alter 
table catalog_sales modify column cs_bill_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7199539', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_bill_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201919', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_net_paid set stats ('row_count'='1439980416', 'ndv'='1809875', 'num_nulls'='7197668', 'min_value'='0.00', 'max_value'='29943.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_addr_sk set stats ('row_count'='1439980416', 'ndv'='6015811', 'num_nulls'='7198232', 'min_value'='1', 'max_value'='6000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_mode_sk set stats ('row_count'='1439980416', 'ndv'='20', 'num_nulls'='7201083', 'min_value'='1', 'max_value'='20', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_date_sk set stats ('row_count'='1439980416', 'ndv'='1835', 'num_nulls'='7203326', 'min_value'='2450815', 'max_value'='2452654', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_sold_time_sk set stats ('row_count'='1439980416', 'ndv'='87677', 'num_nulls'='7201329', 'min_value'='0', 'max_value'='86399', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_wholesale_cost set stats ('row_count'='1439980416', 'ndv'='9905', 'num_nulls'='7201098', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='5759921664') - """ - - sql """ - alter table call_center modify column cc_company_name set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='able', 'max_value'='pri', 'data_size'='160') - """ - - sql """ - alter table call_center modify column cc_market_manager set 
stats ('row_count'='42', 'ndv'='35', 'num_nulls'='0', 'min_value'='Cesar Allen', 'max_value'='William Larsen', 'data_size'='524') - """ - - sql """ - alter table call_center modify column cc_mkt_id set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_street_type set stats ('row_count'='42', 'ndv'='11', 'num_nulls'='0', 'min_value'='Avenue', 'max_value'='Way', 'data_size'='184') - """ - - sql """ - alter table catalog_returns modify column cr_return_tax set stats ('row_count'='143996756', 'ndv'='149828', 'num_nulls'='2881611', 'min_value'='0.00', 'max_value'='2511.58', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returning_cdemo_sk set stats ('row_count'='143996756', 'ndv'='1916366', 'num_nulls'='2880543', 'min_value'='1', 'max_value'='1920800', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_returning_hdemo_sk set stats ('row_count'='143996756', 'ndv'='7251', 'num_nulls'='2882692', 'min_value'='1', 'max_value'='7200', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_reversed_charge set stats ('row_count'='143996756', 'ndv'='802509', 'num_nulls'='2881215', 'min_value'='0.00', 'max_value'='24033.84', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_warehouse_sk set stats ('row_count'='143996756', 'ndv'='20', 'num_nulls'='2882192', 'min_value'='1', 'max_value'='20', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_demo_sk set stats ('row_count'='7200', 'ndv'='7251', 'num_nulls'='0', 'min_value'='1', 'max_value'='7200', 'data_size'='57600') - """ - - sql """ - alter table household_demographics modify column hd_vehicle_count set stats ('row_count'='7200', 'ndv'='6', 'num_nulls'='0', 'min_value'='-1', 'max_value'='4', 'data_size'='28800') 
- """ - - sql """ - alter table customer_address modify column ca_zip set stats ('row_count'='6000000', 'ndv'='9253', 'num_nulls'='0', 'min_value'='', 'max_value'='99981', 'data_size'='29097610') - """ - - sql """ - alter table income_band modify column ib_income_band_sk set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='1', 'max_value'='20', 'data_size'='160') - """ - - sql """ - alter table catalog_page modify column cp_type set stats ('row_count'='30000', 'ndv'='4', 'num_nulls'='0', 'min_value'='', 'max_value'='quarterly', 'data_size'='227890') - """ - - sql """ - alter table item modify column i_brand set stats ('row_count'='300000', 'ndv'='714', 'num_nulls'='0', 'min_value'='', 'max_value'='univunivamalg #9', 'data_size'='4834917') - """ - - sql """ - alter table item modify column i_formulation set stats ('row_count'='300000', 'ndv'='224757', 'num_nulls'='0', 'min_value'='', 'max_value'='yellow98911509228741', 'data_size'='5984460') - """ - - sql """ - alter table item modify column i_item_desc set stats ('row_count'='300000', 'ndv'='217721', 'num_nulls'='0', 'min_value'='', 'max_value'='Youngsters used to save quite colour', 'data_size'='30093342') - """ - - sql """ - alter table web_returns modify column wr_fee set stats ('row_count'='71997522', 'ndv'='9958', 'num_nulls'='3238926', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_item_sk set stats ('row_count'='71997522', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_reason_sk set stats ('row_count'='71997522', 'ndv'='65', 'num_nulls'='3238897', 'min_value'='1', 'max_value'='65', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_customer_sk set stats ('row_count'='71997522', 'ndv'='12117831', 'num_nulls'='3242433', 'min_value'='1', 'max_value'='12000000', 
'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_city set stats ('row_count'='54', 'ndv'='31', 'num_nulls'='0', 'min_value'='', 'max_value'='Woodlawn', 'data_size'='491') - """ - - sql """ - alter table web_site modify column web_close_date_sk set stats ('row_count'='54', 'ndv'='18', 'num_nulls'='10', 'min_value'='2441265', 'max_value'='2446218', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_company_id set stats ('row_count'='54', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_company_name set stats ('row_count'='54', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='203') - """ - - sql """ - alter table web_site modify column web_county set stats ('row_count'='54', 'ndv'='25', 'num_nulls'='0', 'min_value'='', 'max_value'='Williamson County', 'data_size'='762') - """ - - sql """ - alter table web_site modify column web_name set stats ('row_count'='54', 'ndv'='10', 'num_nulls'='0', 'min_value'='', 'max_value'='site_8', 'data_size'='312') - """ - - sql """ - alter table web_site modify column web_open_date_sk set stats ('row_count'='54', 'ndv'='27', 'num_nulls'='1', 'min_value'='2450373', 'max_value'='2450807', 'data_size'='432') - """ - - sql """ - alter table promotion modify column p_channel_dmail set stats ('row_count'='1500', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='1483') - """ - - sql """ - alter table promotion modify column p_channel_press set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_channel_radio set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1479') - """ - - sql """ - alter table promotion modify column p_cost set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='18', 'min_value'='1000.00', 'max_value'='1000.00', 'data_size'='12000') - """ - - sql """ - alter table web_sales modify column ws_ext_tax set stats ('row_count'='720000376', 'ndv'='211413', 'num_nulls'='179695', 'min_value'='0.00', 'max_value'='2682.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_item_sk set stats ('row_count'='720000376', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_net_paid set stats ('row_count'='720000376', 'ndv'='1749360', 'num_nulls'='179970', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_ship_tax set stats ('row_count'='720000376', 'ndv'='3224829', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='46004.19', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_paid_inc_tax set stats ('row_count'='720000376', 'ndv'='2354996', 'num_nulls'='179972', 'min_value'='0.00', 'max_value'='32492.90', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_order_number set stats ('row_count'='720000376', 'ndv'='60401176', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_quantity set stats ('row_count'='720000376', 'ndv'='100', 'num_nulls'='179781', 'min_value'='1', 'max_value'='100', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ship_cdemo_sk set stats ('row_count'='720000376', 'ndv'='1916366', 'num_nulls'='180290', 'min_value'='1', 'max_value'='1920800', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_sold_time_sk set stats ('row_count'='720000376', 'ndv'='87677', 'num_nulls'='179980', 'min_value'='0', 'max_value'='86399', 'data_size'='5760003008') - """ - - sql 
""" - alter table store modify column s_street_type set stats ('row_count'='1002', 'ndv'='21', 'num_nulls'='0', 'min_value'='', 'max_value'='Wy', 'data_size'='4189') - """ - - sql """ - alter table web_page modify column wp_autogen_flag set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='0', 'min_value'='', 'max_value'='Y', 'data_size'='2962') - """ - - sql """ - alter table web_page modify column wp_rec_start_date set stats ('row_count'='3000', 'ndv'='4', 'num_nulls'='29', 'min_value'='1997-09-03', 'max_value'='2001-09-03', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_net_loss set stats ('row_count'='287999764', 'ndv'='714210', 'num_nulls'='10080716', 'min_value'='0.50', 'max_value'='10776.08', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_amt_inc_tax set stats ('row_count'='287999764', 'ndv'='1259368', 'num_nulls'='10076879', 'min_value'='0.00', 'max_value'='20454.63', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_quantity set stats ('row_count'='287999764', 'ndv'='100', 'num_nulls'='10082815', 'min_value'='1', 'max_value'='100', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_return_ship_cost set stats ('row_count'='287999764', 'ndv'='355844', 'num_nulls'='10081927', 'min_value'='0.00', 'max_value'='9767.34', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reversed_charge set stats ('row_count'='287999764', 'ndv'='700618', 'num_nulls'='10085976', 'min_value'='0.00', 'max_value'='17339.42', 'data_size'='1151999056') - """ - - sql """ - alter table store_sales modify column ss_net_paid_inc_tax set stats ('row_count'='2879987999', 'ndv'='1681767', 'num_nulls'='129609050', 'min_value'='0.00', 'max_value'='21769.48', 'data_size'='11519951996') - """ - - sql """ - alter table customer modify column c_birth_day set stats ('row_count'='12000000', 
'ndv'='31', 'num_nulls'='420361', 'min_value'='1', 'max_value'='31', 'data_size'='48000000') - """ - - sql """ - alter table customer_demographics modify column cd_credit_rating set stats ('row_count'='1920800', 'ndv'='4', 'num_nulls'='0', 'min_value'='Good', 'max_value'='Unknown', 'data_size'='13445600') - """ - - sql """ - alter table customer_demographics modify column cd_demo_sk set stats ('row_count'='1920800', 'ndv'='1916366', 'num_nulls'='0', 'min_value'='1', 'max_value'='1920800', 'data_size'='15366400') - """ - - sql """ - alter table customer_demographics modify column cd_dep_count set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='0', 'max_value'='6', 'data_size'='7683200') - """ - - sql """ - alter table customer_demographics modify column cd_education_status set stats ('row_count'='1920800', 'ndv'='7', 'num_nulls'='0', 'min_value'='2 yr Degree', 'max_value'='Unknown', 'data_size'='18384800') - """ - - sql """ - alter table customer_demographics modify column cd_gender set stats ('row_count'='1920800', 'ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='M', 'data_size'='1920800') - """ - - sql """ - alter table customer_demographics modify column cd_marital_status set stats ('row_count'='1920800', 'ndv'='5', 'num_nulls'='0', 'min_value'='D', 'max_value'='W', 'data_size'='1920800') - """ - - sql """ - alter table date_dim modify column d_date_id set stats ('row_count'='73049', 'ndv'='72907', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAAFCAA', 'max_value'='AAAAAAAAPPPPECAA', 'data_size'='1168784') - """ - - sql """ - alter table date_dim modify column d_fy_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_year set stats ('row_count'='73049', 'ndv'='202', 'num_nulls'='0', 'min_value'='1900', 'max_value'='2100', 'data_size'='292196') - """ - - sql """ - alter table warehouse modify column 
w_warehouse_id set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AAAAAAAAABAAAAAA', 'max_value'='AAAAAAAAPAAAAAAA', 'data_size'='320') - """ - - sql """ - alter table catalog_sales modify column cs_ext_list_price set stats ('row_count'='1439980416', 'ndv'='1160303', 'num_nulls'='7199542', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ext_tax set stats ('row_count'='1439980416', 'ndv'='215267', 'num_nulls'='7200412', 'min_value'='0.00', 'max_value'='2673.27', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_quantity set stats ('row_count'='1439980416', 'ndv'='100', 'num_nulls'='7202885', 'min_value'='1', 'max_value'='100', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_ship_cdemo_sk set stats ('row_count'='1439980416', 'ndv'='1916366', 'num_nulls'='7200151', 'min_value'='1', 'max_value'='1920800', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_customer_sk set stats ('row_count'='1439980416', 'ndv'='12157481', 'num_nulls'='7201507', 'min_value'='1', 'max_value'='12000000', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_company set stats ('row_count'='42', 'ndv'='6', 'num_nulls'='0', 'min_value'='1', 'max_value'='6', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_mkt_desc set stats ('row_count'='42', 'ndv'='33', 'num_nulls'='0', 'min_value'='Arms increase controversial, present so', 'max_value'='Young tests could buy comfortable, local users; o', 'data_size'='2419') - """ - - sql """ - alter table call_center modify column cc_open_date_sk set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='2450794', 'max_value'='2451146', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_rec_end_date set stats ('row_count'='42', 
'ndv'='3', 'num_nulls'='21', 'min_value'='2000-01-01', 'max_value'='2001-12-31', 'data_size'='168') - """ - - sql """ - alter table catalog_returns modify column cr_order_number set stats ('row_count'='143996756', 'ndv'='93476424', 'num_nulls'='0', 'min_value'='2', 'max_value'='160000000', 'data_size'='1151974048') - """ - - sql """ - alter table catalog_returns modify column cr_return_amount set stats ('row_count'='143996756', 'ndv'='882831', 'num_nulls'='2880424', 'min_value'='0.00', 'max_value'='28805.04', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_date_sk set stats ('row_count'='143996756', 'ndv'='2108', 'num_nulls'='0', 'min_value'='2450821', 'max_value'='2452924', 'data_size'='1151974048') - """ - - sql """ - alter table income_band modify column ib_upper_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='10000', 'max_value'='200000', 'data_size'='80') - """ - - sql """ - alter table catalog_page modify column cp_department set stats ('row_count'='30000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='DEPARTMENT', 'data_size'='297110') - """ - - sql """ - alter table catalog_page modify column cp_end_date_sk set stats ('row_count'='30000', 'ndv'='97', 'num_nulls'='302', 'min_value'='2450844', 'max_value'='2453186', 'data_size'='120000') - """ - - sql """ - alter table item modify column i_brand_id set stats ('row_count'='300000', 'ndv'='951', 'num_nulls'='763', 'min_value'='1001001', 'max_value'='10016017', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_category set stats ('row_count'='300000', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='Women', 'data_size'='1766742') - """ - - sql """ - alter table item modify column i_class_id set stats ('row_count'='300000', 'ndv'='16', 'num_nulls'='722', 'min_value'='1', 'max_value'='16', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_item_sk set stats 
('row_count'='300000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2400000') - """ - - sql """ - alter table item modify column i_manufact_id set stats ('row_count'='300000', 'ndv'='1005', 'num_nulls'='761', 'min_value'='1', 'max_value'='1000', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_wholesale_cost set stats ('row_count'='300000', 'ndv'='7243', 'num_nulls'='740', 'min_value'='0.02', 'max_value'='89.49', 'data_size'='1200000') - """ - - sql """ - alter table web_returns modify column wr_refunded_cdemo_sk set stats ('row_count'='71997522', 'ndv'='1916366', 'num_nulls'='3240352', 'min_value'='1', 'max_value'='1920800', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_return_tax set stats ('row_count'='71997522', 'ndv'='137392', 'num_nulls'='3237729', 'min_value'='0.00', 'max_value'='2551.16', 'data_size'='287990088') - """ - - sql """ - alter table web_returns modify column wr_returning_hdemo_sk set stats ('row_count'='71997522', 'ndv'='7251', 'num_nulls'='3238239', 'min_value'='1', 'max_value'='7200', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_web_page_sk set stats ('row_count'='71997522', 'ndv'='2984', 'num_nulls'='3240387', 'min_value'='1', 'max_value'='3000', 'data_size'='575980176') - """ - - sql """ - alter table web_site modify column web_class set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='371') - """ - - sql """ - alter table web_site modify column web_zip set stats ('row_count'='54', 'ndv'='32', 'num_nulls'='0', 'min_value'='14593', 'max_value'='99431', 'data_size'='270') - """ - - sql """ - alter table promotion modify column p_channel_email set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1480') - """ - - sql """ - alter table promotion modify column p_item_sk set stats 
('row_count'='1500', 'ndv'='1467', 'num_nulls'='19', 'min_value'='184', 'max_value'='299990', 'data_size'='12000') - """ - - sql """ - alter table promotion modify column p_promo_name set stats ('row_count'='1500', 'ndv'='11', 'num_nulls'='0', 'min_value'='', 'max_value'='pri', 'data_size'='5896') - """ - - sql """ - alter table web_sales modify column ws_ext_discount_amt set stats ('row_count'='720000376', 'ndv'='1093513', 'num_nulls'='179851', 'min_value'='0.00', 'max_value'='29982.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_ext_list_price set stats ('row_count'='720000376', 'ndv'='1160303', 'num_nulls'='179866', 'min_value'='1.00', 'max_value'='30000.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_wholesale_cost set stats ('row_count'='720000376', 'ndv'='9905', 'num_nulls'='179834', 'min_value'='1.00', 'max_value'='100.00', 'data_size'='2880001504') - """ - - sql """ - alter table store modify column s_market_manager set stats ('row_count'='1002', 'ndv'='732', 'num_nulls'='0', 'min_value'='', 'max_value'='Zane Perez', 'data_size'='12823') - """ - - sql """ - alter table store modify column s_number_employees set stats ('row_count'='1002', 'ndv'='101', 'num_nulls'='8', 'min_value'='200', 'max_value'='300', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_end_date set stats ('row_count'='1002', 'ndv'='3', 'num_nulls'='501', 'min_value'='1999-03-13', 'max_value'='2001-03-12', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_rec_start_date set stats ('row_count'='1002', 'ndv'='4', 'num_nulls'='7', 'min_value'='1997-03-13', 'max_value'='2001-03-13', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_suite_number set stats ('row_count'='1002', 'ndv'='76', 'num_nulls'='0', 'min_value'='', 'max_value'='Suite Y', 'data_size'='7866') - """ - - sql """ - alter table time_dim modify column t_hour set stats 
('row_count'='86400', 'ndv'='24', 'num_nulls'='0', 'min_value'='0', 'max_value'='23', 'data_size'='345600') - """ - - sql """ - alter table time_dim modify column t_shift set stats ('row_count'='86400', 'ndv'='3', 'num_nulls'='0', 'min_value'='first', 'max_value'='third', 'data_size'='460800') - """ - - sql """ - alter table web_page modify column wp_link_count set stats ('row_count'='3000', 'ndv'='24', 'num_nulls'='27', 'min_value'='2', 'max_value'='25', 'data_size'='12000') - """ - - sql """ - alter table web_page modify column wp_rec_end_date set stats ('row_count'='3000', 'ndv'='3', 'num_nulls'='1500', 'min_value'='1999-09-03', 'max_value'='2001-09-02', 'data_size'='12000') - """ - - sql """ - alter table store_returns modify column sr_cdemo_sk set stats ('row_count'='287999764', 'ndv'='1916366', 'num_nulls'='10076902', 'min_value'='1', 'max_value'='1920800', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_item_sk set stats ('row_count'='287999764', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_cdemo_sk set stats ('row_count'='2879987999', 'ndv'='1916366', 'num_nulls'='129602155', 'min_value'='1', 'max_value'='1920800', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_ext_discount_amt set stats ('row_count'='2879987999', 'ndv'='1161208', 'num_nulls'='129609101', 'min_value'='0.00', 'max_value'='19778.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_wholesale_cost set stats ('row_count'='2879987999', 'ndv'='393180', 'num_nulls'='129595018', 'min_value'='1.00', 'max_value'='10000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_list_price set stats ('row_count'='2879987999', 'ndv'='19640', 'num_nulls'='129597020', 'min_value'='1.00', 'max_value'='200.00', 'data_size'='11519951996') - 
""" - - sql """ - alter table store_sales modify column ss_net_paid set stats ('row_count'='2879987999', 'ndv'='1288646', 'num_nulls'='129599407', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_sold_date_sk set stats ('row_count'='2879987999', 'ndv'='1820', 'num_nulls'='129600843', 'min_value'='2450816', 'max_value'='2452642', 'data_size'='23039903992') - """ - - sql """ - alter table store_sales modify column ss_sold_time_sk set stats ('row_count'='2879987999', 'ndv'='47252', 'num_nulls'='129593012', 'min_value'='28800', 'max_value'='75599', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_carrier set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='AIRBORNE', 'max_value'='ZOUROS', 'data_size'='133') - """ - - sql """ - alter table customer modify column c_birth_year set stats ('row_count'='12000000', 'ndv'='69', 'num_nulls'='419584', 'min_value'='1924', 'max_value'='1992', 'data_size'='48000000') - """ - - sql """ - alter table customer modify column c_login set stats ('row_count'='12000000', 'ndv'='1', 'num_nulls'='0', 'min_value'='', 'max_value'='', 'data_size'='0') - """ - - sql """ - alter table customer modify column c_salutation set stats ('row_count'='12000000', 'ndv'='7', 'num_nulls'='0', 'min_value'='', 'max_value'='Sir', 'data_size'='37544445') - """ - - sql """ - alter table reason modify column r_reason_desc set stats ('row_count'='65', 'ndv'='64', 'num_nulls'='0', 'min_value'='Did not fit', 'max_value'='unauthoized purchase', 'data_size'='848') - """ - - sql """ - alter table date_dim modify column d_current_year set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table date_dim modify column d_dom set stats ('row_count'='73049', 'ndv'='31', 'num_nulls'='0', 'min_value'='1', 'max_value'='31', 'data_size'='292196') - """ - - 
sql """ - alter table date_dim modify column d_same_day_lq set stats ('row_count'='73049', 'ndv'='72231', 'num_nulls'='0', 'min_value'='2414930', 'max_value'='2487978', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_week_seq set stats ('row_count'='73049', 'ndv'='10448', 'num_nulls'='0', 'min_value'='1', 'max_value'='10436', 'data_size'='292196') - """ - - sql """ - alter table date_dim modify column d_weekend set stats ('row_count'='73049', 'ndv'='2', 'num_nulls'='0', 'min_value'='N', 'max_value'='Y', 'data_size'='73049') - """ - - sql """ - alter table warehouse modify column w_zip set stats ('row_count'='20', 'ndv'='18', 'num_nulls'='0', 'min_value'='19231', 'max_value'='89275', 'data_size'='100') - """ - - sql """ - alter table catalog_sales modify column cs_catalog_page_sk set stats ('row_count'='1439980416', 'ndv'='17005', 'num_nulls'='7199032', 'min_value'='1', 'max_value'='25207', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_coupon_amt set stats ('row_count'='1439980416', 'ndv'='1578778', 'num_nulls'='7198116', 'min_value'='0.00', 'max_value'='28730.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_list_price set stats ('row_count'='1439980416', 'ndv'='29396', 'num_nulls'='7201549', 'min_value'='1.00', 'max_value'='300.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_net_profit set stats ('row_count'='1439980416', 'ndv'='2058398', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19962.00', 'data_size'='5759921664') - """ - - sql """ - alter table catalog_sales modify column cs_order_number set stats ('row_count'='1439980416', 'ndv'='159051824', 'num_nulls'='0', 'min_value'='1', 'max_value'='160000000', 'data_size'='11519843328') - """ - - sql """ - alter table catalog_sales modify column cs_ship_hdemo_sk set stats ('row_count'='1439980416', 'ndv'='7251', 'num_nulls'='7201542', 
'min_value'='1', 'max_value'='7200', 'data_size'='11519843328') - """ - - sql """ - alter table call_center modify column cc_call_center_sk set stats ('row_count'='42', 'ndv'='42', 'num_nulls'='0', 'min_value'='1', 'max_value'='42', 'data_size'='336') - """ - - sql """ - alter table call_center modify column cc_city set stats ('row_count'='42', 'ndv'='17', 'num_nulls'='0', 'min_value'='Antioch', 'max_value'='Spring Hill', 'data_size'='386') - """ - - sql """ - alter table call_center modify column cc_closed_date_sk set stats ('row_count'='42', 'ndv'='0', 'num_nulls'='42', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_gmt_offset set stats ('row_count'='42', 'ndv'='4', 'num_nulls'='0', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='168') - """ - - sql """ - alter table call_center modify column cc_hours set stats ('row_count'='42', 'ndv'='3', 'num_nulls'='0', 'min_value'='8AM-12AM', 'max_value'='8AM-8AM', 'data_size'='300') - """ - - sql """ - alter table call_center modify column cc_street_number set stats ('row_count'='42', 'ndv'='21', 'num_nulls'='0', 'min_value'='38', 'max_value'='999', 'data_size'='120') - """ - - sql """ - alter table call_center modify column cc_tax_percentage set stats ('row_count'='42', 'ndv'='12', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='168') - """ - - sql """ - alter table inventory modify column inv_date_sk set stats ('row_count'='783000000', 'ndv'='261', 'num_nulls'='0', 'min_value'='2450815', 'max_value'='2452635', 'data_size'='6264000000') - """ - - sql """ - alter table inventory modify column inv_item_sk set stats ('row_count'='783000000', 'ndv'='295433', 'num_nulls'='0', 'min_value'='1', 'max_value'='300000', 'data_size'='6264000000') - """ - - sql """ - alter table catalog_returns modify column cr_fee set stats ('row_count'='143996756', 'ndv'='9958', 'num_nulls'='2882168', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='575987024') - """ - - sql """ - 
alter table catalog_returns modify column cr_return_quantity set stats ('row_count'='143996756', 'ndv'='100', 'num_nulls'='2878774', 'min_value'='1', 'max_value'='100', 'data_size'='575987024') - """ - - sql """ - alter table catalog_returns modify column cr_returned_time_sk set stats ('row_count'='143996756', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='1151974048') - """ - - sql """ - alter table household_demographics modify column hd_dep_count set stats ('row_count'='7200', 'ndv'='10', 'num_nulls'='0', 'min_value'='0', 'max_value'='9', 'data_size'='28800') - """ - - sql """ - alter table customer_address modify column ca_county set stats ('row_count'='6000000', 'ndv'='1825', 'num_nulls'='0', 'min_value'='', 'max_value'='Ziebach County', 'data_size'='81254984') - """ - - sql """ - alter table income_band modify column ib_lower_bound set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='0', 'max_value'='190001', 'data_size'='80') - """ - - sql """ - alter table item modify column i_category_id set stats ('row_count'='300000', 'ndv'='10', 'num_nulls'='766', 'min_value'='1', 'max_value'='10', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_class set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='0', 'min_value'='', 'max_value'='womens watch', 'data_size'='2331199') - """ - - sql """ - alter table item modify column i_container set stats ('row_count'='300000', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='2094652') - """ - - sql """ - alter table item modify column i_current_price set stats ('row_count'='300000', 'ndv'='9685', 'num_nulls'='775', 'min_value'='0.09', 'max_value'='99.99', 'data_size'='1200000') - """ - - sql """ - alter table item modify column i_manager_id set stats ('row_count'='300000', 'ndv'='100', 'num_nulls'='744', 'min_value'='1', 'max_value'='100', 'data_size'='1200000') - """ - - sql """ - alter table item modify column 
i_size set stats ('row_count'='300000', 'ndv'='8', 'num_nulls'='0', 'min_value'='', 'max_value'='small', 'data_size'='1296134') - """ - - sql """ - alter table web_returns modify column wr_order_number set stats ('row_count'='71997522', 'ndv'='42383708', 'num_nulls'='0', 'min_value'='1', 'max_value'='60000000', 'data_size'='575980176') - """ - - sql """ - alter table web_returns modify column wr_refunded_cash set stats ('row_count'='71997522', 'ndv'='955369', 'num_nulls'='3240493', 'min_value'='0.00', 'max_value'='26992.92', 'data_size'='287990088') - """ - - sql """ - alter table web_site modify column web_country set stats ('row_count'='54', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='United States', 'data_size'='689') - """ - - sql """ - alter table web_site modify column web_gmt_offset set stats ('row_count'='54', 'ndv'='4', 'num_nulls'='1', 'min_value'='-8.00', 'max_value'='-5.00', 'data_size'='216') - """ - - sql """ - alter table web_site modify column web_market_manager set stats ('row_count'='54', 'ndv'='46', 'num_nulls'='0', 'min_value'='', 'max_value'='Zachery Oneil', 'data_size'='691') - """ - - sql """ - alter table web_site modify column web_site_sk set stats ('row_count'='54', 'ndv'='54', 'num_nulls'='0', 'min_value'='1', 'max_value'='54', 'data_size'='432') - """ - - sql """ - alter table web_site modify column web_street_name set stats ('row_count'='54', 'ndv'='53', 'num_nulls'='0', 'min_value'='', 'max_value'='Wilson Ridge', 'data_size'='471') - """ - - sql """ - alter table web_site modify column web_tax_percentage set stats ('row_count'='54', 'ndv'='13', 'num_nulls'='1', 'min_value'='0.00', 'max_value'='0.12', 'data_size'='216') - """ - - sql """ - alter table promotion modify column p_channel_tv set stats ('row_count'='1500', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='N', 'data_size'='1481') - """ - - sql """ - alter table promotion modify column p_response_targe set stats ('row_count'='1500', 'ndv'='1', 
'num_nulls'='27', 'min_value'='1', 'max_value'='1', 'data_size'='6000') - """ - - sql """ - alter table web_sales modify column ws_bill_addr_sk set stats ('row_count'='720000376', 'ndv'='6015742', 'num_nulls'='179648', 'min_value'='1', 'max_value'='6000000', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ext_sales_price set stats ('row_count'='720000376', 'ndv'='1091003', 'num_nulls'='180023', 'min_value'='0.00', 'max_value'='29810.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_net_profit set stats ('row_count'='720000376', 'ndv'='2014057', 'num_nulls'='0', 'min_value'='-10000.00', 'max_value'='19840.00', 'data_size'='2880001504') - """ - - sql """ - alter table web_sales modify column ws_promo_sk set stats ('row_count'='720000376', 'ndv'='1489', 'num_nulls'='180016', 'min_value'='1', 'max_value'='1500', 'data_size'='5760003008') - """ - - sql """ - alter table web_sales modify column ws_ship_customer_sk set stats ('row_count'='720000376', 'ndv'='12074547', 'num_nulls'='179966', 'min_value'='1', 'max_value'='12000000', 'data_size'='5760003008') - """ - - sql """ - alter table store modify column s_division_name set stats ('row_count'='1002', 'ndv'='2', 'num_nulls'='0', 'min_value'='', 'max_value'='Unknown', 'data_size'='6965') - """ - - sql """ - alter table store modify column s_floor_space set stats ('row_count'='1002', 'ndv'='752', 'num_nulls'='6', 'min_value'='5002549', 'max_value'='9997773', 'data_size'='4008') - """ - - sql """ - alter table store modify column s_tax_precentage set stats ('row_count'='1002', 'ndv'='12', 'num_nulls'='8', 'min_value'='0.00', 'max_value'='0.11', 'data_size'='4008') - """ - - sql """ - alter table time_dim modify column t_time_id set stats ('row_count'='86400', 'ndv'='85663', 'num_nulls'='0', 'min_value'='AAAAAAAAAAAABAAA', 'max_value'='AAAAAAAAPPPPAAAA', 'data_size'='1382400') - """ - - sql """ - alter table time_dim modify column t_time_sk set 
stats ('row_count'='86400', 'ndv'='87677', 'num_nulls'='0', 'min_value'='0', 'max_value'='86399', 'data_size'='691200') - """ - - sql """ - alter table store_returns modify column sr_fee set stats ('row_count'='287999764', 'ndv'='9958', 'num_nulls'='10081860', 'min_value'='0.50', 'max_value'='100.00', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_reason_sk set stats ('row_count'='287999764', 'ndv'='65', 'num_nulls'='10087936', 'min_value'='1', 'max_value'='65', 'data_size'='2303998112') - """ - - sql """ - alter table store_returns modify column sr_store_credit set stats ('row_count'='287999764', 'ndv'='698161', 'num_nulls'='10077188', 'min_value'='0.00', 'max_value'='17792.48', 'data_size'='1151999056') - """ - - sql """ - alter table store_returns modify column sr_ticket_number set stats ('row_count'='287999764', 'ndv'='168770768', 'num_nulls'='0', 'min_value'='1', 'max_value'='240000000', 'data_size'='2303998112') - """ - - sql """ - alter table store_sales modify column ss_ext_list_price set stats ('row_count'='2879987999', 'ndv'='770971', 'num_nulls'='129593800', 'min_value'='1.00', 'max_value'='20000.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_ext_sales_price set stats ('row_count'='2879987999', 'ndv'='754248', 'num_nulls'='129589177', 'min_value'='0.00', 'max_value'='19972.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_net_profit set stats ('row_count'='2879987999', 'ndv'='1497362', 'num_nulls'='129572933', 'min_value'='-10000.00', 'max_value'='9986.00', 'data_size'='11519951996') - """ - - sql """ - alter table store_sales modify column ss_promo_sk set stats ('row_count'='2879987999', 'ndv'='1489', 'num_nulls'='129597096', 'min_value'='1', 'max_value'='1500', 'data_size'='23039903992') - """ - - sql """ - alter table ship_mode modify column sm_code set stats ('row_count'='20', 'ndv'='4', 'num_nulls'='0', 
'min_value'='AIR', 'max_value'='SURFACE', 'data_size'='87') - """ - - sql """ - alter table ship_mode modify column sm_contract set stats ('row_count'='20', 'ndv'='20', 'num_nulls'='0', 'min_value'='2mM8l', 'max_value'='yVfotg7Tio3MVhBg6Bkn', 'data_size'='252') - """ - - sql """ - alter table customer modify column c_current_hdemo_sk set stats ('row_count'='12000000', 'ndv'='7251', 'num_nulls'='418736', 'min_value'='1', 'max_value'='7200', 'data_size'='96000000') - """ - - sql """ - alter table dbgen_version modify column dv_create_date set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2023-07-06', 'max_value'='2023-07-06', 'data_size'='4') - """ - - sql """ - alter table dbgen_version modify column dv_create_time set stats ('row_count'='1', 'ndv'='1', 'num_nulls'='0', 'min_value'='2017-05-13 00:00:00', 'max_value'='2017-05-13 00:00:00', 'data_size'='8') - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy deleted file mode 100644 index 52a88c9c294b1b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query1.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100""" - qt_ds_shape_1 ''' - explain shape plan - with customer_total_return as -(select sr_customer_sk as ctr_customer_sk -,sr_store_sk as ctr_store_sk -,sum(SR_FEE) as ctr_total_return -from store_returns -,date_dim -where sr_returned_date_sk = d_date_sk -and d_year =2000 -group by sr_customer_sk -,sr_store_sk) - select c_customer_id -from customer_total_return ctr1 -,store -,customer -where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 -from customer_total_return ctr2 -where ctr1.ctr_store_sk = ctr2.ctr_store_sk) -and 
s_store_sk = ctr1.ctr_store_sk -and s_state = 'TN' -and ctr1.ctr_customer_sk = c_customer_sk -order by c_customer_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy deleted file mode 100644 index edff37bb8d673a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query10.groovy +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by 
cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100""" - qt_ds_shape_10 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3, - cd_dep_count, - count(*) cnt4, - cd_dep_employed_count, - count(*) cnt5, - cd_dep_college_count, - count(*) cnt6 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_county in ('Fairfield County','Campbell County','Washtenaw County','Escambia County','Cleburne County') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 ANd 3+3) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2001 and - d_moy between 3 and 3+3)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy deleted file mode 100644 index 5b659ebd3d24ff..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query11.groovy +++ /dev/null @@ -1,199 +0,0 @@ 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = 
ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1998 - and t_s_secyear.dyear = 1998+1 - and t_w_firstyear.dyear = 1998 - and t_w_secyear.dyear = 1998+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address -limit 100""" - qt_ds_shape_11 ''' - 
explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1998 - and t_s_secyear.dyear = 1998+1 - and t_w_firstyear.dyear = 1998 - and t_w_secyear.dyear = 1998+1 - 
and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_email_address -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy deleted file mode 100644 index 90f49e49c24c7e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query12.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Men', 'Books', 'Electronics') - and ws_sold_date_sk = d_date_sk - and d_date between cast('2001-06-15' as date) - and (cast('2001-06-15' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_12 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ws_ext_sales_price) as itemrevenue - ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over - (partition by i_class) as revenueratio -from - web_sales - ,item - ,date_dim -where - ws_item_sk = i_item_sk - and i_category in ('Men', 'Books', 'Electronics') - and ws_sold_date_sk = d_date_sk - and d_date between cast('2001-06-15' as date) - and (cast('2001-06-15' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price 
-order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy deleted file mode 100644 index 64da9d0bcd9de2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query13.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country 
= 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) -""" - qt_ds_shape_13 ''' - explain shape plan - select avg(ss_quantity) - ,avg(ss_ext_sales_price) - ,avg(ss_ext_wholesale_cost) - ,sum(ss_ext_wholesale_cost) - from store_sales - ,store - ,customer_demographics - ,household_demographics - ,customer_address - ,date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and((ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'M' - and cd_education_status = 'College' - and ss_sales_price between 100.00 and 150.00 - and hd_dep_count = 3 - )or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'D' - and cd_education_status = 'Primary' - and ss_sales_price between 50.00 and 100.00 - and hd_dep_count = 1 - ) or - (ss_hdemo_sk=hd_demo_sk - and cd_demo_sk = ss_cdemo_sk - and cd_marital_status = 'W' - and cd_education_status = '2 yr Degree' - and ss_sales_price between 150.00 and 200.00 - and hd_dep_count = 1 - )) - and((ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('IL', 'TN', 'TX') - and ss_net_profit between 100 and 200 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('WY', 'OH', 'ID') - and ss_net_profit between 150 and 300 - ) or - (ss_addr_sk = ca_address_sk - and ca_country = 'United States' - and ca_state in ('MS', 'SC', 'IA') - and ss_net_profit between 50 and 250 - )) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy deleted file mode 100644 index 07ef2b99688004..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query14.groovy +++ /dev/null @@ -1,245 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 1999 AND 1999 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 1999 AND 1999 + 2 - 
intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 1999 AND 1999 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), -avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - ,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, 
i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100""" - qt_ds_shape_14 ''' - explain shape plan - with cross_items as - (select i_item_sk ss_item_sk - from item, - (select iss.i_brand_id brand_id - ,iss.i_class_id class_id - ,iss.i_category_id category_id - from store_sales - ,item iss - ,date_dim d1 - where ss_item_sk = iss.i_item_sk - and ss_sold_date_sk = d1.d_date_sk - and d1.d_year between 1999 AND 1999 + 2 - intersect - select ics.i_brand_id - ,ics.i_class_id - ,ics.i_category_id - from catalog_sales - ,item ics - ,date_dim d2 - where cs_item_sk = ics.i_item_sk - and cs_sold_date_sk = d2.d_date_sk - and d2.d_year between 1999 AND 1999 + 2 - intersect - select iws.i_brand_id - ,iws.i_class_id - ,iws.i_category_id - from web_sales - ,item iws - ,date_dim d3 - where ws_item_sk = iws.i_item_sk - and ws_sold_date_sk = d3.d_date_sk - and d3.d_year between 1999 AND 1999 + 2) - t where i_brand_id = brand_id - and i_class_id = class_id - and i_category_id = category_id -), -avg_sales as - (select avg(quantity*list_price) average_sales - from (select ss_quantity quantity - ,ss_list_price list_price - from store_sales - ,date_dim - where ss_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select cs_quantity quantity - ,cs_list_price list_price - from catalog_sales - ,date_dim - where cs_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2 - union all - select ws_quantity quantity - ,ws_list_price list_price - from web_sales - 
,date_dim - where ws_sold_date_sk = d_date_sk - and d_year between 1999 and 1999 + 2) x) - select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) - from( - select 'store' channel, i_brand_id,i_class_id - ,i_category_id,sum(ss_quantity*ss_list_price) sales - , count(*) number_sales - from store_sales - ,item - ,date_dim - where ss_item_sk in (select ss_item_sk from cross_items) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) - union all - select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales - from catalog_sales - ,item - ,date_dim - where cs_item_sk in (select ss_item_sk from cross_items) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) - union all - select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales - from web_sales - ,item - ,date_dim - where ws_item_sk in (select ss_item_sk from cross_items) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 1999+2 - and d_moy = 11 - group by i_brand_id,i_class_id,i_category_id - having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) - ) y - group by rollup (channel, i_brand_id,i_class_id,i_category_id) - order by channel,i_brand_id,i_class_id,i_category_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy deleted file mode 100644 index 05d8d44bcdaddc..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query15.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or 
cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100""" - qt_ds_shape_15 ''' - explain shape plan - select ca_zip - ,sum(cs_sales_price) - from catalog_sales - ,customer - ,customer_address - ,date_dim - where cs_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', - '85392', '85460', '80348', '81792') - or ca_state in ('CA','WA','GA') - or cs_sales_price > 500) - and cs_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 2001 - group by ca_zip - order by ca_zip - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy deleted file mode 100644 index 23e94ad465c6b8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query16.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'PA' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100""" - qt_ds_shape_16 ''' - explain shape plan - select - count(distinct cs_order_number) as "order count" - ,sum(cs_ext_ship_cost) as "total shipping cost" - ,sum(cs_net_profit) as "total net profit" -from - catalog_sales cs1 - ,date_dim - ,customer_address - ,call_center -where - d_date between '2002-4-01' and - (cast('2002-4-01' as date) + interval 60 day) -and 
cs1.cs_ship_date_sk = d_date_sk -and cs1.cs_ship_addr_sk = ca_address_sk -and ca_state = 'PA' -and cs1.cs_call_center_sk = cc_call_center_sk -and cc_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County' -) -and exists (select * - from catalog_sales cs2 - where cs1.cs_order_number = cs2.cs_order_number - and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) -and not exists(select * - from catalog_returns cr1 - where cs1.cs_order_number = cr1.cr_order_number) -order by count(distinct cs_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy deleted file mode 100644 index 6bde5e8c30e2d2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query17.groovy +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = 
cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100""" - qt_ds_shape_17 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,s_state - ,count(ss_quantity) as store_sales_quantitycount - ,avg(ss_quantity) as store_sales_quantityave - ,stddev_samp(ss_quantity) as store_sales_quantitystdev - ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov - ,count(sr_return_quantity) as store_returns_quantitycount - ,avg(sr_return_quantity) as store_returns_quantityave - ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev - ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov - ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave - ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev - ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov - from store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where d1.d_quarter_name = '2001Q1' - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_quarter_name in ('2001Q1','2001Q2','2001Q3') - group by i_item_id - ,i_item_desc - ,s_state - order by i_item_id - ,i_item_desc - ,s_state -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy deleted file mode 100644 index 
1b3ee9b374b321..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query18.groovy +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, 
- avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Primary' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (1,3,7,11,10,4) and - d_year = 2001 and - ca_state in ('AL','MO','TN' - ,'GA','MT','IN','CA') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100""" - qt_ds_shape_18 ''' - explain shape plan - select i_item_id, - ca_country, - ca_state, - ca_county, - avg( cast(cs_quantity as decimal(12,2))) agg1, - avg( cast(cs_list_price as decimal(12,2))) agg2, - avg( cast(cs_coupon_amt as decimal(12,2))) agg3, - avg( cast(cs_sales_price as decimal(12,2))) agg4, - avg( cast(cs_net_profit as decimal(12,2))) agg5, - avg( cast(c_birth_year as decimal(12,2))) agg6, - avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 - from catalog_sales, customer_demographics cd1, - customer_demographics cd2, customer, customer_address, date_dim, item - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd1.cd_demo_sk and - cs_bill_customer_sk = c_customer_sk and - cd1.cd_gender = 'F' and - cd1.cd_education_status = 'Primary' and - c_current_cdemo_sk = cd2.cd_demo_sk and - c_current_addr_sk = ca_address_sk and - c_birth_month in (1,3,7,11,10,4) and - d_year = 2001 and - ca_state in ('AL','MO','TN' - ,'GA','MT','IN','CA') - group by rollup (i_item_id, ca_country, ca_state, ca_county) - order by ca_country, - ca_state, - ca_county, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy deleted file mode 100644 index 43a9dbdd37a710..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query19.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 """ - qt_ds_shape_19 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item,customer,customer_address,store - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=14 - and d_moy=11 - and d_year=2002 - and ss_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and substr(ca_zip,1,5) <> substr(s_zip,1,5) - and ss_store_sk = s_store_sk - group by i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact - order by ext_price desc - ,i_brand - ,i_brand_id - ,i_manufact_id - ,i_manufact -limit 100 - ''' -} 
diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy deleted file mode 100644 index e6a85af6b5c792..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query2.groovy +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales 
sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1""" - qt_ds_shape_2 ''' - explain shape plan - with wscs as - (select sold_date_sk - ,sales_price - from (select ws_sold_date_sk sold_date_sk - ,ws_ext_sales_price sales_price - from web_sales - union all - select cs_sold_date_sk sold_date_sk - ,cs_ext_sales_price sales_price - from catalog_sales) t), - wswscs as - (select d_week_seq, - sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales - from wscs - ,date_dim - where d_date_sk = sold_date_sk - group by d_week_seq) - select d_week_seq1 - ,round(sun_sales1/sun_sales2,2) - ,round(mon_sales1/mon_sales2,2) - ,round(tue_sales1/tue_sales2,2) - ,round(wed_sales1/wed_sales2,2) - ,round(thu_sales1/thu_sales2,2) - ,round(fri_sales1/fri_sales2,2) - ,round(sat_sales1/sat_sales2,2) - from - (select wswscs.d_week_seq d_week_seq1 - ,sun_sales sun_sales1 - ,mon_sales mon_sales1 - ,tue_sales tue_sales1 - ,wed_sales 
wed_sales1 - ,thu_sales thu_sales1 - ,fri_sales fri_sales1 - ,sat_sales sat_sales1 - from wswscs,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998) y, - (select wswscs.d_week_seq d_week_seq2 - ,sun_sales sun_sales2 - ,mon_sales mon_sales2 - ,tue_sales tue_sales2 - ,wed_sales wed_sales2 - ,thu_sales thu_sales2 - ,fri_sales fri_sales2 - ,sat_sales sat_sales2 - from wswscs - ,date_dim - where date_dim.d_week_seq = wswscs.d_week_seq and - d_year = 1998+1) z - where d_week_seq1=d_week_seq2-53 - order by d_week_seq1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy deleted file mode 100644 index e132b59a3806b7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query20.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Books', 'Music', 'Sports') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-06-18' as date) - and (cast('2002-06-18' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100""" - qt_ds_shape_20 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(cs_ext_sales_price) as itemrevenue - ,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over - (partition by i_class) as revenueratio - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and i_category in ('Books', 'Music', 'Sports') - and cs_sold_date_sk = d_date_sk - and d_date between cast('2002-06-18' as date) - and (cast('2002-06-18' as date) + interval 30 day) - group by i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - order by 
i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy deleted file mode 100644 index 7f079f8806f79c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query21.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'SET enable_fold_constant_by_be = false' //plan shape will be different - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('1999-06-22' as date) - interval 30 day) - and (cast ('1999-06-22' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100""" - qt_ds_shape_21 ''' - explain shape plan - select * - from(select w_warehouse_name - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_before - ,sum(case when (cast(d_date as date) >= cast ('1999-06-22' as date)) - then inv_quantity_on_hand - else 0 end) as inv_after - 
from inventory - ,warehouse - ,item - ,date_dim - where i_current_price between 0.99 and 1.49 - and i_item_sk = inv_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_date between (cast ('1999-06-22' as date) - interval 30 day) - and (cast ('1999-06-22' as date) + interval 30 day) - group by w_warehouse_name, i_item_id) x - where (case when inv_before > 0 - then inv_after / inv_before - else null - end) between 2.0/3.0 and 3.0/2.0 - order by w_warehouse_name - ,i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy deleted file mode 100644 index 3eea3092249ead..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query22.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1200 and 1200 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100""" - qt_ds_shape_22 ''' - explain shape plan - select i_product_name - ,i_brand - ,i_class - ,i_category - ,avg(inv_quantity_on_hand) qoh - from inventory - ,date_dim - ,item - where inv_date_sk=d_date_sk - and inv_item_sk=i_item_sk - and d_month_seq between 1200 and 1200 + 11 - group by rollup(i_product_name - ,i_brand - ,i_class - ,i_category) -order by qoh, i_product_name, i_brand, i_class, i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy deleted file mode 100644 index 28268f8fcbcdf9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query23.groovy +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query23") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - 
where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), -best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100""" - qt_ds_shape_23 ''' - explain shape plan - with frequent_ss_items as - (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt - from store_sales - ,date_dim - ,item - where ss_sold_date_sk = d_date_sk - and ss_item_sk = i_item_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by substr(i_item_desc,1,30),i_item_sk,d_date - having count(*) >4), - max_store_sales as - (select max(csales) tpcds_cmax - from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales - from store_sales - ,customer - ,date_dim - where ss_customer_sk = c_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (2000,2000+1,2000+2,2000+3) - group by c_customer_sk) t), -best_ss_customer as - (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales - from store_sales - ,customer - where ss_customer_sk = c_customer_sk - group by c_customer_sk - having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select - * -from - 
max_store_sales)) - select sum(sales) - from (select cs_quantity*cs_list_price sales - from catalog_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and cs_sold_date_sk = d_date_sk - and cs_item_sk in (select item_sk from frequent_ss_items) - and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) - union all - select ws_quantity*ws_list_price sales - from web_sales - ,date_dim - where d_year = 2000 - and d_moy = 7 - and ws_sold_date_sk = d_date_sk - and ws_item_sk in (select item_sk from frequent_ss_items) - and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) t2 - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy deleted file mode 100644 index b64674097d7b86..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query24.groovy +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query24") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns - ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name -""" - qt_ds_shape_24 ''' - explain shape plan - with ssales as -(select c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size - ,sum(ss_net_paid) netpaid -from store_sales - ,store_returns 
- ,store - ,item - ,customer - ,customer_address -where ss_ticket_number = sr_ticket_number - and ss_item_sk = sr_item_sk - and ss_customer_sk = c_customer_sk - and ss_item_sk = i_item_sk - and ss_store_sk = s_store_sk - and c_current_addr_sk = ca_address_sk - and c_birth_country <> upper(ca_country) - and s_zip = ca_zip -and s_market_id=5 -group by c_last_name - ,c_first_name - ,s_store_name - ,ca_state - ,s_state - ,i_color - ,i_current_price - ,i_manager_id - ,i_units - ,i_size) -select c_last_name - ,c_first_name - ,s_store_name - ,sum(netpaid) paid -from ssales -where i_color = 'aquamarine' -group by c_last_name - ,c_first_name - ,s_store_name -having sum(netpaid) > (select 0.05*avg(netpaid) - from ssales) -order by c_last_name - ,c_first_name - ,s_store_name - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy deleted file mode 100644 index e206d6f27dd536..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query25.groovy +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query25") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_net_profit) as store_sales_profit - ,max(sr_net_loss) as store_returns_loss - ,max(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 1999 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_25 ''' - explain shape plan - select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_net_profit) as store_sales_profit - ,max(sr_net_loss) as store_returns_loss - ,max(cs_net_profit) as catalog_sales_profit - from - store_sales - ,store_returns - 
,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1999 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 10 - and d2.d_year = 1999 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_moy between 4 and 10 - and d3.d_year = 1999 - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy deleted file mode 100644 index 2f2a1f1ca16a4f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query26.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query26") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2002 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_26 ''' - explain shape plan - select i_item_id, - avg(cs_quantity) agg1, - avg(cs_list_price) agg2, - avg(cs_coupon_amt) agg3, - avg(cs_sales_price) agg4 - from catalog_sales, customer_demographics, date_dim, item, promotion - where cs_sold_date_sk = d_date_sk and - cs_item_sk = i_item_sk and - cs_bill_cdemo_sk = cd_demo_sk and - cs_promo_sk = p_promo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Unknown' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2002 - group by i_item_id - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy deleted file mode 100644 index 6bdbc27bc295c0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query27.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query27") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('TN','TN', 'TN', 'TN', 'TN', 'TN') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100""" - qt_ds_shape_27 ''' - explain shape plan - select i_item_id, - s_state, grouping(s_state) g_state, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, store, item - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_store_sk = s_store_sk and - ss_cdemo_sk = cd_demo_sk and - cd_gender = 'M' and - cd_marital_status = 'W' and - cd_education_status = 'Secondary' and - d_year = 1999 and - s_state in ('TN','TN', 'TN', 'TN', 'TN', 'TN') - group by rollup (i_item_id, s_state) - order by i_item_id - ,s_state - limit 100 - ''' -} diff 
--git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy deleted file mode 100644 index e7f8ae691933b6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query28.groovy +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query28") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 107 and 107+10 - or ss_coupon_amt between 1319 and 1319+1000 - or ss_wholesale_cost between 60 and 60+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 23 and 23+10 - or ss_coupon_amt between 825 and 825+1000 - or ss_wholesale_cost between 43 and 43+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 74 and 74+10 - or ss_coupon_amt between 4381 and 4381+1000 - or ss_wholesale_cost between 57 and 57+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 3117 and 3117+1000 - or ss_wholesale_cost between 68 and 68+20)) B4, - (select avg(ss_list_price) B5_LP - 
,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 and 25 - and (ss_list_price between 58 and 58+10 - or ss_coupon_amt between 9402 and 9402+1000 - or ss_wholesale_cost between 38 and 38+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 64 and 64+10 - or ss_coupon_amt between 5792 and 5792+1000 - or ss_wholesale_cost between 73 and 73+20)) B6 -limit 100""" - qt_ds_shape_28 ''' - explain shape plan - select * -from (select avg(ss_list_price) B1_LP - ,count(ss_list_price) B1_CNT - ,count(distinct ss_list_price) B1_CNTD - from store_sales - where ss_quantity between 0 and 5 - and (ss_list_price between 107 and 107+10 - or ss_coupon_amt between 1319 and 1319+1000 - or ss_wholesale_cost between 60 and 60+20)) B1, - (select avg(ss_list_price) B2_LP - ,count(ss_list_price) B2_CNT - ,count(distinct ss_list_price) B2_CNTD - from store_sales - where ss_quantity between 6 and 10 - and (ss_list_price between 23 and 23+10 - or ss_coupon_amt between 825 and 825+1000 - or ss_wholesale_cost between 43 and 43+20)) B2, - (select avg(ss_list_price) B3_LP - ,count(ss_list_price) B3_CNT - ,count(distinct ss_list_price) B3_CNTD - from store_sales - where ss_quantity between 11 and 15 - and (ss_list_price between 74 and 74+10 - or ss_coupon_amt between 4381 and 4381+1000 - or ss_wholesale_cost between 57 and 57+20)) B3, - (select avg(ss_list_price) B4_LP - ,count(ss_list_price) B4_CNT - ,count(distinct ss_list_price) B4_CNTD - from store_sales - where ss_quantity between 16 and 20 - and (ss_list_price between 89 and 89+10 - or ss_coupon_amt between 3117 and 3117+1000 - or ss_wholesale_cost between 68 and 68+20)) B4, - (select avg(ss_list_price) B5_LP - ,count(ss_list_price) B5_CNT - ,count(distinct ss_list_price) B5_CNTD - from store_sales - where ss_quantity between 21 
and 25 - and (ss_list_price between 58 and 58+10 - or ss_coupon_amt between 9402 and 9402+1000 - or ss_wholesale_cost between 38 and 38+20)) B5, - (select avg(ss_list_price) B6_LP - ,count(ss_list_price) B6_CNT - ,count(distinct ss_list_price) B6_CNTD - from store_sales - where ss_quantity between 26 and 30 - and (ss_list_price between 64 and 64+10 - or ss_coupon_amt between 5792 and 5792+1000 - or ss_wholesale_cost between 73 and 73+20)) B6 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy deleted file mode 100644 index bc0a11be1f4227..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query29.groovy +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query29") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_quantity) as store_sales_quantity - ,max(sr_return_quantity) as store_returns_quantity - ,max(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns - ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1998 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1998 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1998,1998+1,1998+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100""" - qt_ds_shape_29 ''' - explain shape plan - select - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - ,max(ss_quantity) as store_sales_quantity - ,max(sr_return_quantity) as store_returns_quantity - ,max(cs_quantity) as catalog_sales_quantity - from - store_sales - ,store_returns 
- ,catalog_sales - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,item - where - d1.d_moy = 4 - and d1.d_year = 1998 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and ss_customer_sk = sr_customer_sk - and ss_item_sk = sr_item_sk - and ss_ticket_number = sr_ticket_number - and sr_returned_date_sk = d2.d_date_sk - and d2.d_moy between 4 and 4 + 3 - and d2.d_year = 1998 - and sr_customer_sk = cs_bill_customer_sk - and sr_item_sk = cs_item_sk - and cs_sold_date_sk = d3.d_date_sk - and d3.d_year in (1998,1998+1,1998+2) - group by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - order by - i_item_id - ,i_item_desc - ,s_store_id - ,s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy deleted file mode 100644 index c396e81a258154..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query3.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100""" - qt_ds_shape_3 ''' - explain shape plan - select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_sales_price) sum_agg - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manufact_id = 816 - and dt.d_moy=11 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,sum_agg desc - ,brand_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy deleted file mode 100644 index 3677b56c442266..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query30.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more 
contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query30") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2000 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - 
,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'AR' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100""" - qt_ds_shape_30 ''' - explain shape plan - with customer_total_return as - (select wr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(wr_return_amt) as ctr_total_return - from web_returns - ,date_dim - ,customer_address - where wr_returned_date_sk = d_date_sk - and d_year =2000 - and wr_returning_addr_sk = ca_address_sk - group by wr_returning_customer_sk - ,ca_state) - select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'AR' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag - ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address - ,c_last_review_date_sk,ctr_total_return -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy deleted file mode 100644 index faca7bb52750e0..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query31.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query31") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from 
web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - ,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 1999 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 1999 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 1999 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 1999 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 1999 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =1999 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by store_q2_q3_increase""" - qt_ds_shape_31 ''' - explain shape plan - with ss as - (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales - from store_sales,date_dim,customer_address - where ss_sold_date_sk = d_date_sk - and ss_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year), - ws as - (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales - from web_sales,date_dim,customer_address - where ws_sold_date_sk = d_date_sk - and ws_bill_addr_sk=ca_address_sk - group by ca_county,d_qoy, d_year) - select - ss1.ca_county - ,ss1.d_year - ,ws2.web_sales/ws1.web_sales web_q1_q2_increase - ,ss2.store_sales/ss1.store_sales store_q1_q2_increase - ,ws3.web_sales/ws2.web_sales web_q2_q3_increase - 
,ss3.store_sales/ss2.store_sales store_q2_q3_increase - from - ss ss1 - ,ss ss2 - ,ss ss3 - ,ws ws1 - ,ws ws2 - ,ws ws3 - where - ss1.d_qoy = 1 - and ss1.d_year = 1999 - and ss1.ca_county = ss2.ca_county - and ss2.d_qoy = 2 - and ss2.d_year = 1999 - and ss2.ca_county = ss3.ca_county - and ss3.d_qoy = 3 - and ss3.d_year = 1999 - and ss1.ca_county = ws1.ca_county - and ws1.d_qoy = 1 - and ws1.d_year = 1999 - and ws1.ca_county = ws2.ca_county - and ws2.d_qoy = 2 - and ws2.d_year = 1999 - and ws1.ca_county = ws3.ca_county - and ws3.d_qoy = 3 - and ws3.d_year =1999 - and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end - > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end - and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end - > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end - order by store_q2_q3_increase - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy deleted file mode 100644 index 7f6ed9a0c5a721..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query32.groovy +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query32") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 722 -and i_item_sk = cs_item_sk -and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) - and d_date_sk = cs_sold_date_sk - ) -limit 100""" - qt_ds_shape_32 ''' - explain shape plan - select sum(cs_ext_discount_amt) as "excess discount amount" -from - catalog_sales - ,item - ,date_dim -where -i_manufact_id = 722 -and i_item_sk = cs_item_sk -and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) -and d_date_sk = cs_sold_date_sk -and cs_ext_discount_amt - > ( - select - 1.3 * avg(cs_ext_discount_amt) - from - catalog_sales - ,date_dim - where - cs_item_sk = i_item_sk - and d_date between '2001-03-09' and - (cast('2001-03-09' as date) + interval 90 day) - 
and d_date_sk = cs_sold_date_sk - ) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy deleted file mode 100644 index 8e1ab9f66da27e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query33.groovy +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query33") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select 
i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100""" - qt_ds_shape_33 ''' - explain shape plan - with ss as ( - select - i_manufact_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - cs as ( - select - i_manufact_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id), - ws as ( - select - i_manufact_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_manufact_id in (select - i_manufact_id -from - item -where i_category in ('Books')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2001 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_manufact_id) - select i_manufact_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_manufact_id - order by total_sales -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy deleted file mode 100644 index 98a2d27c001b41..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query34.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query34") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and 
store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number""" - qt_ds_shape_34 ''' - explain shape plan - select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '0-500') - and household_demographics.hd_vehicle_count > 0 - and (case when household_demographics.hd_vehicle_count > 0 - then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count - else null - end) > 1.2 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County', - 'Williamson County','Williamson County','Williamson 
County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dn,customer - where ss_customer_sk = c_customer_sk - and cnt between 15 and 20 - order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy deleted file mode 100644 index 3d68f9a85ac51c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query35.groovy +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query35") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - avg(cd_dep_count), - stddev_samp(cd_dep_count), - sum(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - avg(cd_dep_employed_count), - stddev_samp(cd_dep_employed_count), - sum(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - avg(cd_dep_college_count), - stddev_samp(cd_dep_college_count), - sum(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - 
cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100""" - qt_ds_shape_35 ''' - explain shape plan - select - ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - count(*) cnt1, - avg(cd_dep_count), - stddev_samp(cd_dep_count), - sum(cd_dep_count), - cd_dep_employed_count, - count(*) cnt2, - avg(cd_dep_employed_count), - stddev_samp(cd_dep_employed_count), - sum(cd_dep_employed_count), - cd_dep_college_count, - count(*) cnt3, - avg(cd_dep_college_count), - stddev_samp(cd_dep_college_count), - sum(cd_dep_college_count) - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) and - (exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4) or - exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 1999 and - d_qoy < 4)) - group by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - order by ca_state, - cd_gender, - cd_marital_status, - cd_dep_count, - cd_dep_employed_count, - cd_dep_college_count - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy deleted file mode 100644 index 7a681f2906089a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query36.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query36") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in 
('TN','TN','TN','TN', - 'TN','TN','TN','TN') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100""" - qt_ds_shape_36 ''' - explain shape plan - select - sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,item - ,store - where - d1.d_year = 2000 - and d1.d_date_sk = ss_sold_date_sk - and i_item_sk = ss_item_sk - and s_store_sk = ss_store_sk - and s_state in ('TN','TN','TN','TN', - 'TN','TN','TN','TN') - group by rollup(i_category,i_class) - order by - lochierarchy desc - ,case when lochierarchy = 0 then i_category end - ,rank_within_parent - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy deleted file mode 100644 index 5d41320124388e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query37.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query37") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 29 and 29 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2002-03-29' as date) and (cast('2002-03-29' as date) + interval 60 day) - and i_manufact_id in (705,742,777,944) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_37 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, catalog_sales - where i_current_price between 29 and 29 + 30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2002-03-29' as date) and (cast('2002-03-29' as date) + 
interval 60 day) - and i_manufact_id in (705,742,777,944) - and inv_quantity_on_hand between 100 and 500 - and cs_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy deleted file mode 100644 index d758d7a43561a7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query38.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query38") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 -) hot_cust -limit 100""" - qt_ds_shape_38 ''' - explain shape plan - select count(*) from ( - select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, 
d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 - intersect - select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1189 and 1189 + 11 -) hot_cust -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy deleted file mode 100644 index 5392d2b82f8c21..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query39.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query39") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =2000 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov""" - qt_ds_shape_39 ''' - explain shape plan - with inv as -(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stdev,mean, case mean when 0 then null else stdev/mean end cov - from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy - ,stddev_samp(inv_quantity_on_hand) 
stdev,avg(inv_quantity_on_hand) mean - from inventory - ,item - ,warehouse - ,date_dim - where inv_item_sk = i_item_sk - and inv_warehouse_sk = w_warehouse_sk - and inv_date_sk = d_date_sk - and d_year =2000 - group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo - where case mean when 0 then 0 else stdev/mean end > 1) -select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov - ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov -from inv inv1,inv inv2 -where inv1.i_item_sk = inv2.i_item_sk - and inv1.w_warehouse_sk = inv2.w_warehouse_sk - and inv1.d_moy=1 - and inv2.d_moy=1+1 -order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov - ,inv2.d_moy,inv2.mean, inv2.cov - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy deleted file mode 100644 index 24689dda916653..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query4.groovy +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - 
,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / 
t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100""" - qt_ds_shape_4 ''' - explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total - ,'c' sale_type - from customer - ,catalog_sales - ,date_dim - where c_customer_sk = cs_bill_customer_sk - and cs_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year -union all - select c_customer_id customer_id - 
,c_first_name customer_first_name - ,c_last_name customer_last_name - ,c_preferred_cust_flag customer_preferred_cust_flag - ,c_birth_country customer_birth_country - ,c_login customer_login - ,c_email_address customer_email_address - ,d_year dyear - ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - group by c_customer_id - ,c_first_name - ,c_last_name - ,c_preferred_cust_flag - ,c_birth_country - ,c_login - ,c_email_address - ,d_year - ) - select - t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_c_firstyear - ,year_total t_c_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_c_secyear.customer_id - and t_s_firstyear.customer_id = t_c_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_c_firstyear.sale_type = 'c' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_c_secyear.sale_type = 'c' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.dyear = 1999 - and t_s_secyear.dyear = 1999+1 - and t_c_firstyear.dyear = 1999 - and t_c_secyear.dyear = 1999+1 - and t_w_firstyear.dyear = 1999 - and t_w_secyear.dyear = 1999+1 - and t_s_firstyear.year_total > 0 - and t_c_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - and case when 
t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end - > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - order by t_s_secyear.customer_id - ,t_s_secyear.customer_first_name - ,t_s_secyear.customer_last_name - ,t_s_secyear.customer_birth_country -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy deleted file mode 100644 index ac7a71d3233856..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query40.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query40") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-05-02' as date) - interval 30 day) - and (cast ('2001-05-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100""" - qt_ds_shape_40 ''' - explain shape plan - select - w_state - ,i_item_id - ,sum(case when (cast(d_date as date) < cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before - ,sum(case when (cast(d_date as date) >= cast ('2001-05-02' as date)) - then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after - from - catalog_sales left outer join 
catalog_returns on - (cs_order_number = cr_order_number - and cs_item_sk = cr_item_sk) - ,warehouse - ,item - ,date_dim - where - i_current_price between 0.99 and 1.49 - and i_item_sk = cs_item_sk - and cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and d_date between (cast ('2001-05-02' as date) - interval 30 day) - and (cast ('2001-05-02' as date) + interval 30 day) - group by - w_state,i_item_id - order by w_state,i_item_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy deleted file mode 100644 index 0754878e39e7a2..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query41.groovy +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query41") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select distinct(i_product_name) - from item i1 - where i_manufact_id between 704 and 704+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'forest' or i_color = 'lime') and - (i_units = 'Pallet' or i_units = 'Pound') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'navy' or i_color = 'slate') and - (i_units = 'Gross' or i_units = 'Bunch') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'powder' or i_color = 'sky') and - (i_units = 'Dozen' or i_units = 'Lb') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'maroon' or i_color = 'smoke') and - (i_units = 'Ounce' or i_units = 'Case') and - (i_size = 'economy' or i_size = 'small') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'dark' or i_color = 'aquamarine') and - (i_units = 'Ton' or i_units = 'Tbl') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'frosted' or i_color = 'plum') and - (i_units = 'Dram' or i_units = 'Box') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 
'Men' and - (i_color = 'papaya' or i_color = 'peach') and - (i_units = 'Bundle' or i_units = 'Carton') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'firebrick' or i_color = 'sienna') and - (i_units = 'Cup' or i_units = 'Each') and - (i_size = 'economy' or i_size = 'small') - )))) > 0 - order by i_product_name - limit 100""" - qt_ds_shape_41 ''' - explain shape plan - select distinct(i_product_name) - from item i1 - where i_manufact_id between 704 and 704+40 - and (select count(*) as item_cnt - from item - where (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'forest' or i_color = 'lime') and - (i_units = 'Pallet' or i_units = 'Pound') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'navy' or i_color = 'slate') and - (i_units = 'Gross' or i_units = 'Bunch') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'powder' or i_color = 'sky') and - (i_units = 'Dozen' or i_units = 'Lb') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'maroon' or i_color = 'smoke') and - (i_units = 'Ounce' or i_units = 'Case') and - (i_size = 'economy' or i_size = 'small') - ))) or - (i_manufact = i1.i_manufact and - ((i_category = 'Women' and - (i_color = 'dark' or i_color = 'aquamarine') and - (i_units = 'Ton' or i_units = 'Tbl') and - (i_size = 'economy' or i_size = 'small') - ) or - (i_category = 'Women' and - (i_color = 'frosted' or i_color = 'plum') and - (i_units = 'Dram' or i_units = 'Box') and - (i_size = 'extra large' or i_size = 'petite') - ) or - (i_category = 'Men' and - (i_color = 'papaya' or i_color = 'peach') and - (i_units = 'Bundle' or i_units = 'Carton') and - (i_size = 'N/A' or i_size = 'large') - ) or - (i_category = 'Men' and - (i_color = 'firebrick' or i_color = 'sienna') and - (i_units = 'Cup' or i_units = 'Each') and - (i_size = 'economy' or i_size = 'small') - )))) 
> 0 - order by i_product_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy deleted file mode 100644 index 44f5c9f0a6fff7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query42.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query42") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=1998 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 """ - qt_ds_shape_42 ''' - explain shape plan - select dt.d_year - ,item.i_category_id - ,item.i_category - ,sum(ss_ext_sales_price) - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=11 - and dt.d_year=1998 - group by dt.d_year - ,item.i_category_id - ,item.i_category - order by sum(ss_ext_sales_price) desc,dt.d_year - ,item.i_category_id - ,item.i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy deleted file mode 100644 index abb3e84a2dc74f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query43.groovy 
+++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query43") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then 
ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100""" - qt_ds_shape_43 ''' - explain shape plan - select s_store_name, s_store_id, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from date_dim, store_sales, store - where d_date_sk = ss_sold_date_sk and - s_store_sk = ss_store_sk and - s_gmt_offset = -5 and - d_year = 2000 - group by s_store_name, s_store_id - order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy deleted file mode 100644 index 46c52ced591560..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query44.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query44") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by 
rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100""" - qt_ds_shape_44 ''' - explain shape plan - select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing -from(select * - from (select item_sk,rank() over (order by rank_col asc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V1)V11 - where rnk < 11) asceding, - (select * - from (select item_sk,rank() over (order by rank_col desc) rnk - from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col - from store_sales ss1 - where ss_store_sk = 4 - group by ss_item_sk - having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col - from store_sales - where ss_store_sk = 4 - and ss_hdemo_sk is null - group by ss_store_sk))V2)V21 - where rnk < 11) descending, -item i1, -item i2 -where asceding.rnk = descending.rnk - and i1.i_item_sk=asceding.item_sk - and i2.i_item_sk=descending.item_sk -order by asceding.rnk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy deleted file mode 100644 index 9c27eb60e16869..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query45.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query45") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - 
and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100""" - qt_ds_shape_45 ''' - explain shape plan - select ca_zip, ca_city, sum(ws_sales_price) - from web_sales, customer, customer_address, date_dim, item - where ws_bill_customer_sk = c_customer_sk - and c_current_addr_sk = ca_address_sk - and ws_item_sk = i_item_sk - and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') - or - i_item_id in (select i_item_id - from item - where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) - ) - ) - and ws_sold_date_sk = d_date_sk - and d_qoy = 1 and d_year = 2000 - group by ca_zip, ca_city - order by ca_zip, ca_city - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy deleted file mode 100644 index 44f48ecde635c3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query46.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query46") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_city in ('Midway','Fairview','Fairview','Midway','Fairview') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100""" - qt_ds_shape_46 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - 
,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and (household_demographics.hd_dep_count = 8 or - household_demographics.hd_vehicle_count= 0) - and date_dim.d_dow in (6,0) - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_city in ('Midway','Fairview','Fairview','Midway','Fairview') - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy deleted file mode 100644 index ee5856493b5f98..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query47.groovy +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query47") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2000 or - ( d_year = 2000-1 and d_moy =12) or - ( d_year = 2000+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name, v1.s_company_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum 
- from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2000 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100""" - qt_ds_shape_47 ''' - explain shape plan - with v1 as( - select i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, - s_store_name, s_company_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - s_store_name, s_company_name - order by d_year, d_moy) rn - from item, store_sales, date_dim, store - where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - ( - d_year = 2000 or - ( d_year = 2000-1 and d_moy =12) or - ( d_year = 2000+1 and d_moy =1) - ) - group by i_category, i_brand, - s_store_name, s_company_name, - d_year, d_moy), - v2 as( - select v1.s_store_name, v1.s_company_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1.s_store_name = v1_lag.s_store_name and - v1.s_store_name = v1_lead.s_store_name and - v1.s_company_name = v1_lag.s_company_name and - v1.s_company_name = v1_lead.s_company_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - 
select * - from v2 - where d_year = 2000 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, nsum - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy deleted file mode 100644 index fa89e8b5976f70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query48.groovy +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query48") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select sum (ss_quantity) - from store_sales, store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'S' - and - cd_education_status = 'Secondary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'M' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = 'Advanced Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('ND', 'NY', 'SD') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'GA', 'KS') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('CO', 'MN', 'NC') - and ss_net_profit between 50 and 25000 - ) - ) -""" - qt_ds_shape_48 ''' - explain shape plan - select sum (ss_quantity) - from store_sales, 
store, customer_demographics, customer_address, date_dim - where s_store_sk = ss_store_sk - and ss_sold_date_sk = d_date_sk and d_year = 2001 - and - ( - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'S' - and - cd_education_status = 'Secondary' - and - ss_sales_price between 100.00 and 150.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'M' - and - cd_education_status = '2 yr Degree' - and - ss_sales_price between 50.00 and 100.00 - ) - or - ( - cd_demo_sk = ss_cdemo_sk - and - cd_marital_status = 'D' - and - cd_education_status = 'Advanced Degree' - and - ss_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('ND', 'NY', 'SD') - and ss_net_profit between 0 and 2000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('MD', 'GA', 'KS') - and ss_net_profit between 150 and 3000 - ) - or - (ss_addr_sk = ca_address_sk - and - ca_country = 'United States' - and - ca_state in ('CO', 'MN', 'NC') - and ss_net_profit between 50 and 25000 - ) - ) - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy deleted file mode 100644 index 5883d3522930e5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query49.groovy +++ /dev/null @@ -1,295 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query49") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 
10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left 
outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100""" - qt_ds_shape_49 ''' - explain shape plan - select channel, item, return_ratio, return_rank, currency_rank from - (select - 'web' as channel - ,web.item - ,web.return_ratio - ,web.return_rank - ,web.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select ws.ws_item_sk as item - ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ - cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - web_sales ws left outer join web_returns wr - on (ws.ws_order_number = wr.wr_order_number and - ws.ws_item_sk = wr.wr_item_sk) - ,date_dim - where - wr.wr_return_amt > 10000 - and ws.ws_net_profit > 1 - and ws.ws_net_paid > 0 - and ws.ws_quantity > 0 - and ws_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by ws.ws_item_sk - ) in_web - ) web - where - ( - web.return_rank <= 10 - or - web.currency_rank <= 10 - ) - union - select - 'catalog' as channel - ,catalog.item - ,catalog.return_ratio - ,catalog.return_rank - ,catalog.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select - cs.cs_item_sk as item - 
,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ - cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - catalog_sales cs left outer join catalog_returns cr - on (cs.cs_order_number = cr.cr_order_number and - cs.cs_item_sk = cr.cr_item_sk) - ,date_dim - where - cr.cr_return_amount > 10000 - and cs.cs_net_profit > 1 - and cs.cs_net_paid > 0 - and cs.cs_quantity > 0 - and cs_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by cs.cs_item_sk - ) in_cat - ) catalog - where - ( - catalog.return_rank <= 10 - or - catalog.currency_rank <=10 - ) - union - select - 'store' as channel - ,store.item - ,store.return_ratio - ,store.return_rank - ,store.currency_rank - from ( - select - item - ,return_ratio - ,currency_ratio - ,rank() over (order by return_ratio) as return_rank - ,rank() over (order by currency_ratio) as currency_rank - from - ( select sts.ss_item_sk as item - ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio - ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio - from - store_sales sts left outer join store_returns sr - on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) - ,date_dim - where - sr.sr_return_amt > 10000 - and sts.ss_net_profit > 1 - and sts.ss_net_paid > 0 - and sts.ss_quantity > 0 - and ss_sold_date_sk = d_date_sk - and d_year = 1998 - and d_moy = 11 - group by sts.ss_item_sk - ) in_store - ) store - where ( - store.return_rank <= 10 - or - store.currency_rank <= 10 - ) - ) - t order by 1,4,5,2 - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy deleted file mode 100644 index f1db7b7207b66d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query5.groovy +++ /dev/null @@ -1,293 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select 
cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit 
- from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_5 ''' - explain shape plan - with ssr as - (select s_store_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select ss_store_sk as store_sk, - ss_sold_date_sk as date_sk, - ss_ext_sales_price as sales_price, - ss_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from store_sales - union all - select sr_store_sk as store_sk, - sr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - sr_return_amt as return_amt, - sr_net_loss as net_loss - from store_returns - ) salesreturns, - date_dim, - store - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and store_sk = s_store_sk - group by s_store_id) - , - csr as - (select cp_catalog_page_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as profit_loss - from - ( select cs_catalog_page_sk as page_sk, - cs_sold_date_sk as date_sk, - cs_ext_sales_price as sales_price, - cs_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from catalog_sales - union all - select cr_catalog_page_sk as page_sk, - cr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - cr_return_amount as return_amt, - cr_net_loss as net_loss - from catalog_returns - ) salesreturns, - date_dim, - catalog_page - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and page_sk = cp_catalog_page_sk - group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(sales_price) as sales, - sum(profit) as profit, - sum(return_amt) as returns, - sum(net_loss) as 
profit_loss - from - ( select ws_web_site_sk as wsr_web_site_sk, - ws_sold_date_sk as date_sk, - ws_ext_sales_price as sales_price, - ws_net_profit as profit, - cast(0 as decimal(7,2)) as return_amt, - cast(0 as decimal(7,2)) as net_loss - from web_sales - union all - select ws_web_site_sk as wsr_web_site_sk, - wr_returned_date_sk as date_sk, - cast(0 as decimal(7,2)) as sales_price, - cast(0 as decimal(7,2)) as profit, - wr_return_amt as return_amt, - wr_net_loss as net_loss - from web_returns left outer join web_sales on - ( wr_item_sk = ws_item_sk - and wr_order_number = ws_order_number) - ) salesreturns, - date_dim, - web_site - where date_sk = d_date_sk - and d_date between cast('2000-08-19' as date) - and (cast('2000-08-19' as date) + interval 14 day) - and wsr_web_site_sk = web_site_sk - group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', s_store_id) id - , sales - , returns - , (profit - profit_loss) as profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', cp_catalog_page_id) id - , sales - , returns - , (profit - profit_loss) as profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) id - , sales - , returns - , (profit - profit_loss) as profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy deleted file mode 100644 index 3a0a41bad7a6d9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query50.groovy +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query50") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case 
when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100""" - qt_ds_shape_50 ''' - explain shape plan - select - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and - (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and - (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and - (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - store_sales - ,store_returns - ,store - ,date_dim d1 - ,date_dim d2 -where - d2.d_year = 2001 -and d2.d_moy = 8 -and ss_ticket_number = sr_ticket_number -and ss_item_sk = sr_item_sk -and ss_sold_date_sk = d1.d_date_sk -and sr_returned_date_sk = d2.d_date_sk -and 
ss_customer_sk = sr_customer_sk -and ss_store_sk = s_store_sk -group by - s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -order by s_store_name - ,s_company_id - ,s_street_number - ,s_street_name - ,s_street_type - ,s_suite_number - ,s_city - ,s_county - ,s_state - ,s_zip -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy deleted file mode 100644 index d7fe227ff00e3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query51.groovy +++ /dev/null @@ -1,147 +0,0 @@ -import java.util.stream.Collectors - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query51") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_common_expr_pushdown=false" - sql "set enable_function_pushdown=true" - sql "set enable_parallel_result_sink=false" - sql "set experimental_parallel_scan_max_scanners_count=16" - sql "set experimental_parallel_scan_min_rows_per_scanner=128" - sql "set fragment_transmission_compression_codec=lz4" - sql "set insert_visible_timeout_ms=60000" - sql "set partitioned_hash_agg_rows_threshold=1048576" - sql "set partitioned_hash_join_rows_threshold=8" - sql "set topn_opt_limit_threshold=1" - sql "set wait_timeout=31000" - - - def variables = sql "show variables" - def variableString = variables.stream() - .map { it.toString() } - .collect(Collectors.joining("\n")) - logger.info("Variables:\n${variableString}") - - def ds = """WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) 
cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100""" - qt_ds_shape_51 ''' - explain shape plan - WITH web_v1 as ( -select - ws_item_sk item_sk, d_date, - sum(sum(ws_sales_price)) - over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from web_sales - ,date_dim -where ws_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ws_item_sk is not NULL -group by ws_item_sk, d_date), -store_v1 as ( -select - ss_item_sk item_sk, d_date, - sum(sum(ss_sales_price)) - over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales -from store_sales - ,date_dim -where ss_sold_date_sk=d_date_sk - and d_month_seq between 1212 and 1212+11 - and ss_item_sk is not NULL -group by ss_item_sk, d_date) - select * -from (select item_sk - ,d_date - ,web_sales - ,store_sales - ,max(web_sales) - over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative - ,max(store_sales) - over (partition by item_sk order by d_date rows between unbounded preceding 
and current row) store_cumulative - from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk - ,case when web.d_date is not null then web.d_date else store.d_date end d_date - ,web.cume_sales web_sales - ,store.cume_sales store_sales - from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk - and web.d_date = store.d_date) - )x )y -where web_cumulative > store_cumulative -order by item_sk - ,d_date -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy deleted file mode 100644 index 327bf50ae0ed32..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query52.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query52") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2000 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 """ - qt_ds_shape_52 ''' - explain shape plan - select dt.d_year - ,item.i_brand_id brand_id - ,item.i_brand brand - ,sum(ss_ext_sales_price) ext_price - from date_dim dt - ,store_sales - ,item - where dt.d_date_sk = store_sales.ss_sold_date_sk - and store_sales.ss_item_sk = item.i_item_sk - and item.i_manager_id = 1 - and dt.d_moy=12 - and dt.d_year=2000 - group by dt.d_year - ,item.i_brand - ,item.i_brand_id - order by dt.d_year - ,ext_price desc - ,brand_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy deleted file mode 100644 index b23a25424e4a6f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query53.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query53") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1186,1186+1,1186+2,1186+3,1186+4,1186+5,1186+6,1186+7,1186+8,1186+9,1186+10,1186+11) and -((i_category in ('Books','Children','Electronics') and -i_class in 
('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100""" - qt_ds_shape_53 ''' - explain shape plan - select * from -(select i_manufact_id, -sum(ss_sales_price) sum_sales, -avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and -ss_sold_date_sk = d_date_sk and -ss_store_sk = s_store_sk and -d_month_seq in (1186,1186+1,1186+2,1186+3,1186+4,1186+5,1186+6,1186+7,1186+8,1186+9,1186+10,1186+11) and -((i_category in ('Books','Children','Electronics') and -i_class in ('personal','portable','reference','self-help') and -i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) -or(i_category in ('Women','Music','Men') and -i_class in ('accessories','classical','fragrances','pants') and -i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manufact_id, d_qoy ) tmp1 -where case when avg_quarterly_sales > 0 - then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales - else null end > 0.1 -order by avg_quarterly_sales, - sum_sales, - i_manufact_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy deleted file mode 100644 index 1ee7c497f8f1c1..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query54.groovy +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query54") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, 
- item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100""" - qt_ds_shape_54 ''' - explain shape plan - with my_customers as ( - select distinct c_customer_sk - , c_current_addr_sk - from - ( select cs_sold_date_sk sold_date_sk, - cs_bill_customer_sk customer_sk, - cs_item_sk item_sk - from catalog_sales - union all - select ws_sold_date_sk sold_date_sk, - ws_bill_customer_sk customer_sk, - ws_item_sk item_sk - from web_sales - ) cs_or_ws_sales, - item, - date_dim, - customer - where sold_date_sk = d_date_sk - and item_sk = i_item_sk - and i_category = 'Music' - and i_class = 'country' - and c_customer_sk = cs_or_ws_sales.customer_sk - and d_moy = 1 - and d_year = 1999 - ) - , my_revenue as ( - select c_customer_sk, - sum(ss_ext_sales_price) as revenue - from my_customers, - store_sales, - customer_address, - store, - date_dim - where c_current_addr_sk = ca_address_sk - and ca_county = s_county - and ca_state = s_state - and ss_sold_date_sk = d_date_sk - and c_customer_sk = ss_customer_sk - and d_month_seq between (select 
distinct d_month_seq+1 - from date_dim where d_year = 1999 and d_moy = 1) - and (select distinct d_month_seq+3 - from date_dim where d_year = 1999 and d_moy = 1) - group by c_customer_sk - ) - , segments as - (select cast((revenue/50) as int) as segment - from my_revenue - ) - select segment, count(*) as num_customers, segment*50 as segment_base - from segments - group by segment - order by segment, num_customers - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy deleted file mode 100644 index c2d4c1db731a68..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query55.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query55") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=52 - and d_moy=11 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 """ - qt_ds_shape_55 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand, - sum(ss_ext_sales_price) ext_price - from date_dim, store_sales, item - where d_date_sk = ss_sold_date_sk - and ss_item_sk = i_item_sk - and i_manager_id=52 - and d_moy=11 - and d_year=2000 - group by i_brand, i_brand_id - order by ext_price desc, i_brand_id -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy deleted file mode 100644 index f9a99301af1af6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query56.groovy +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query56") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and 
cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100""" - qt_ds_shape_56 ''' - explain shape plan - with ss as ( - select i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - cs as ( - select i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -6 - group by i_item_id), - ws as ( - select i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from item -where i_color in ('powder','orchid','pink')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 3 - and ws_bill_addr_sk = ca_address_sk - 
and ca_gmt_offset = -6 - group by i_item_id) - select i_item_id ,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by total_sales, - i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy deleted file mode 100644 index b1c5ec41918363..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query57.groovy +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query57") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_category, v1.i_brand, v1.cc_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, avg_monthly_sales - limit 100""" - qt_ds_shape_57 ''' - explain shape plan - with v1 as( - select i_category, i_brand, - cc_name, - d_year, d_moy, - sum(cs_sales_price) sum_sales, - avg(sum(cs_sales_price)) over - (partition by i_category, i_brand, - cc_name, d_year) - avg_monthly_sales, - rank() over - (partition by i_category, i_brand, - cc_name - order by d_year, d_moy) rn - from item, catalog_sales, date_dim, call_center - where cs_item_sk = i_item_sk and - cs_sold_date_sk = d_date_sk and - cc_call_center_sk= cs_call_center_sk and - ( - d_year = 2001 or - ( d_year = 2001-1 and d_moy =12) or - ( d_year = 2001+1 and d_moy =1) - ) - group by i_category, i_brand, - cc_name , d_year, d_moy), - v2 as( - select v1.i_category, v1.i_brand, v1.cc_name - ,v1.d_year - ,v1.avg_monthly_sales - ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum - from v1, v1 v1_lag, v1 v1_lead - where v1.i_category = v1_lag.i_category and - v1.i_category = v1_lead.i_category and - v1.i_brand = v1_lag.i_brand and - v1.i_brand = v1_lead.i_brand and - v1. cc_name = v1_lag. cc_name and - v1. cc_name = v1_lead. 
cc_name and - v1.rn = v1_lag.rn + 1 and - v1.rn = v1_lead.rn - 1) - select * - from v2 - where d_year = 2001 and - avg_monthly_sales > 0 and - case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 - order by sum_sales - avg_monthly_sales, avg_monthly_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy deleted file mode 100644 index 5d618277923ebb..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query58.groovy +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query58") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - 
,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100""" - qt_ds_shape_58 ''' - explain shape plan - with ss_items as - (select i_item_id item_id - ,sum(ss_ext_sales_price) ss_item_rev - from store_sales - ,item - ,date_dim - where ss_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ss_sold_date_sk = d_date_sk - group by i_item_id), - cs_items as - (select i_item_id item_id - ,sum(cs_ext_sales_price) cs_item_rev - from catalog_sales - ,item - ,date_dim - where cs_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq = (select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and cs_sold_date_sk = d_date_sk - group by i_item_id), - ws_items as - (select i_item_id item_id - ,sum(ws_ext_sales_price) ws_item_rev - from web_sales - ,item - ,date_dim - where ws_item_sk = i_item_sk - and d_date in (select d_date - from date_dim - where d_week_seq =(select d_week_seq - from date_dim - where d_date = '2001-06-16')) - and ws_sold_date_sk = d_date_sk - group by i_item_id) - select ss_items.item_id - ,ss_item_rev - ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev - ,cs_item_rev - ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev - ,ws_item_rev - 
,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev - ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average - from ss_items,cs_items,ws_items - where ss_items.item_id=cs_items.item_id - and ss_items.item_id=ws_items.item_id - and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev - and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev - and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev - order by item_id - ,ss_item_rev - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy deleted file mode 100644 index 9bda7dd0b108ce..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query59.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query59") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = 
wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195 and 1195 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195+ 12 and 1195 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100""" - qt_ds_shape_59 ''' - explain shape plan - with wss as - (select d_week_seq, - ss_store_sk, - sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, - sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, - sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, - sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, - sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, - sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, - sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - group by d_week_seq,ss_store_sk - ) - select s_store_name1,s_store_id1,d_week_seq1 - ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 - ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 - ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 - from - (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 - ,s_store_id s_store_id1,sun_sales sun_sales1 - ,mon_sales mon_sales1,tue_sales tue_sales1 - ,wed_sales wed_sales1,thu_sales thu_sales1 - ,fri_sales fri_sales1,sat_sales sat_sales1 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq 
between 1195 and 1195 + 11) y, - (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 - ,s_store_id s_store_id2,sun_sales sun_sales2 - ,mon_sales mon_sales2,tue_sales tue_sales2 - ,wed_sales wed_sales2,thu_sales thu_sales2 - ,fri_sales fri_sales2,sat_sales sat_sales2 - from wss,store,date_dim d - where d.d_week_seq = wss.d_week_seq and - wss.ss_store_sk = s_store_sk and - d_month_seq between 1195+ 12 and 1195 + 23) x - where s_store_id1=s_store_id2 - and d_week_seq1=d_week_seq2-52 - order by s_store_name1,s_store_id1,d_week_seq1 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy deleted file mode 100644 index 5267c9b608cf70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query6.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100""" - qt_ds_shape_6 ''' - explain shape plan - select a.ca_state state, count(*) cnt - from customer_address a - ,customer c - ,store_sales s - ,date_dim d - ,item i - where a.ca_address_sk = c.c_current_addr_sk - and c.c_customer_sk = s.ss_customer_sk - and s.ss_sold_date_sk = d.d_date_sk - and s.ss_item_sk = i.i_item_sk - and d.d_month_seq = - (select distinct (d_month_seq) - from date_dim - where d_year = 2002 - and d_moy = 3 ) - and i.i_current_price > 1.2 * - (select avg(j.i_current_price) - from item j - where j.i_category = i.i_category) - group by a.ca_state - having count(*) >= 10 - order by cnt, a.ca_state - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy deleted file mode 100644 index 362298991deb80..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query60.groovy +++ /dev/null @@ -1,193 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query60") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - 
from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100""" - qt_ds_shape_60 ''' - explain shape plan - with ss as ( - select - i_item_id,sum(ss_ext_sales_price) total_sales - from - store_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ss_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - cs as ( - select - i_item_id,sum(cs_ext_sales_price) total_sales - from - catalog_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and cs_item_sk = i_item_sk - and cs_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and cs_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id), - ws as ( - select - i_item_id,sum(ws_ext_sales_price) total_sales - from - web_sales, - date_dim, - customer_address, - item - where - i_item_id in (select - i_item_id -from - item -where i_category in ('Jewelry')) - and ws_item_sk = i_item_sk - and ws_sold_date_sk = d_date_sk - and d_year = 2000 - and d_moy = 10 - and ws_bill_addr_sk = ca_address_sk - and ca_gmt_offset = -5 - group by i_item_id) - select - i_item_id -,sum(total_sales) total_sales - from (select * from ss - union all - select * from cs - union all - select * from ws) tmp1 - group by i_item_id - order by i_item_id - ,total_sales - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy deleted file mode 100644 index d2e1029fa5c139..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query61.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache 
Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query61") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 
'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100""" - qt_ds_shape_61 ''' - explain shape plan - select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 -from - (select sum(ss_ext_sales_price) promotions - from store_sales - ,store - ,promotion - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_promo_sk = p_promo_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) promotional_sales, - (select sum(ss_ext_sales_price) total - from store_sales - ,store - ,date_dim - ,customer - ,customer_address - ,item - where ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and ss_customer_sk= c_customer_sk - and ca_address_sk = c_current_addr_sk - and ss_item_sk = i_item_sk - and ca_gmt_offset = -7 - and i_category = 'Home' - and s_gmt_offset = -7 - and d_year = 2000 - and d_moy = 12) all_sales -order by promotions, total -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy deleted file mode 100644 index 686760596adddd..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query62.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query62") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - 
ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1223 and 1223 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100""" - qt_ds_shape_62 ''' - explain shape plan - select - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and - (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and - (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and - (ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - web_sales - ,warehouse - ,ship_mode - ,web_site - ,date_dim -where - d_month_seq between 1223 and 1223 + 11 -and ws_ship_date_sk = d_date_sk -and ws_warehouse_sk = w_warehouse_sk -and ws_ship_mode_sk = sm_ship_mode_sk -and ws_web_site_sk = web_site_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,web_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy deleted file mode 100644 index 1b0cd11db4190b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query63.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query63") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1222,1222+1,1222+2,1222+3,1222+4,1222+5,1222+6,1222+7,1222+8,1222+9,1222+10,1222+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100""" - qt_ds_shape_63 ''' - explain shape plan - select * -from (select i_manager_id - ,sum(ss_sales_price) sum_sales - ,avg(sum(ss_sales_price)) over (partition by i_manager_id) 
avg_monthly_sales - from item - ,store_sales - ,date_dim - ,store - where ss_item_sk = i_item_sk - and ss_sold_date_sk = d_date_sk - and ss_store_sk = s_store_sk - and d_month_seq in (1222,1222+1,1222+2,1222+3,1222+4,1222+5,1222+6,1222+7,1222+8,1222+9,1222+10,1222+11) - and (( i_category in ('Books','Children','Electronics') - and i_class in ('personal','portable','reference','self-help') - and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', - 'exportiunivamalg #9','scholaramalgamalg #9')) - or( i_category in ('Women','Music','Men') - and i_class in ('accessories','classical','fragrances','pants') - and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', - 'importoamalg #1'))) -group by i_manager_id, d_moy) tmp1 -where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 -order by i_manager_id - ,avg_monthly_sales - ,sum_sales -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy deleted file mode 100644 index ae63cbc2dab186..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query64.groovy +++ /dev/null @@ -1,279 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query64") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - 
,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1""" - qt_ds_shape_64 ''' - explain shape plan - with cs_ui as - (select cs_item_sk - ,sum(cs_ext_list_price) as 
sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund - from catalog_sales - ,catalog_returns - where cs_item_sk = cr_item_sk - and cs_order_number = cr_order_number - group by cs_item_sk - having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), -cross_sales as - (select i_product_name product_name - ,i_item_sk item_sk - ,s_store_name store_name - ,s_zip store_zip - ,ad1.ca_street_number b_street_number - ,ad1.ca_street_name b_street_name - ,ad1.ca_city b_city - ,ad1.ca_zip b_zip - ,ad2.ca_street_number c_street_number - ,ad2.ca_street_name c_street_name - ,ad2.ca_city c_city - ,ad2.ca_zip c_zip - ,d1.d_year as syear - ,d2.d_year as fsyear - ,d3.d_year s2year - ,count(*) cnt - ,sum(ss_wholesale_cost) s1 - ,sum(ss_list_price) s2 - ,sum(ss_coupon_amt) s3 - FROM store_sales - ,store_returns - ,cs_ui - ,date_dim d1 - ,date_dim d2 - ,date_dim d3 - ,store - ,customer - ,customer_demographics cd1 - ,customer_demographics cd2 - ,promotion - ,household_demographics hd1 - ,household_demographics hd2 - ,customer_address ad1 - ,customer_address ad2 - ,income_band ib1 - ,income_band ib2 - ,item - WHERE ss_store_sk = s_store_sk AND - ss_sold_date_sk = d1.d_date_sk AND - ss_customer_sk = c_customer_sk AND - ss_cdemo_sk= cd1.cd_demo_sk AND - ss_hdemo_sk = hd1.hd_demo_sk AND - ss_addr_sk = ad1.ca_address_sk and - ss_item_sk = i_item_sk and - ss_item_sk = sr_item_sk and - ss_ticket_number = sr_ticket_number and - ss_item_sk = cs_ui.cs_item_sk and - c_current_cdemo_sk = cd2.cd_demo_sk AND - c_current_hdemo_sk = hd2.hd_demo_sk AND - c_current_addr_sk = ad2.ca_address_sk and - c_first_sales_date_sk = d2.d_date_sk and - c_first_shipto_date_sk = d3.d_date_sk and - ss_promo_sk = p_promo_sk and - hd1.hd_income_band_sk = ib1.ib_income_band_sk and - hd2.hd_income_band_sk = ib2.ib_income_band_sk and - cd1.cd_marital_status <> cd2.cd_marital_status and - i_color in ('orange','lace','lawn','misty','blush','pink') and - i_current_price between 
48 and 48 + 10 and - i_current_price between 48 + 1 and 48 + 15 -group by i_product_name - ,i_item_sk - ,s_store_name - ,s_zip - ,ad1.ca_street_number - ,ad1.ca_street_name - ,ad1.ca_city - ,ad1.ca_zip - ,ad2.ca_street_number - ,ad2.ca_street_name - ,ad2.ca_city - ,ad2.ca_zip - ,d1.d_year - ,d2.d_year - ,d3.d_year -) -select cs1.product_name - ,cs1.store_name - ,cs1.store_zip - ,cs1.b_street_number - ,cs1.b_street_name - ,cs1.b_city - ,cs1.b_zip - ,cs1.c_street_number - ,cs1.c_street_name - ,cs1.c_city - ,cs1.c_zip - ,cs1.syear - ,cs1.cnt - ,cs1.s1 as s11 - ,cs1.s2 as s21 - ,cs1.s3 as s31 - ,cs2.s1 as s12 - ,cs2.s2 as s22 - ,cs2.s3 as s32 - ,cs2.syear - ,cs2.cnt -from cross_sales cs1,cross_sales cs2 -where cs1.item_sk=cs2.item_sk and - cs1.syear = 1999 and - cs2.syear = 1999 + 1 and - cs2.cnt <= cs1.cnt and - cs1.store_name = cs2.store_name and - cs1.store_zip = cs2.store_zip -order by cs1.product_name - ,cs1.store_name - ,cs2.cnt - ,cs1.s1 - ,cs2.s1 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy deleted file mode 100644 index 25955c1ce93d77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query65.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query65") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100""" - qt_ds_shape_65 ''' - 
explain shape plan - select - s_store_name, - i_item_desc, - sc.revenue, - i_current_price, - i_wholesale_cost, - i_brand - from store, item, - (select ss_store_sk, avg(revenue) as ave - from - (select ss_store_sk, ss_item_sk, - sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sa - group by ss_store_sk) sb, - (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue - from store_sales, date_dim - where ss_sold_date_sk = d_date_sk and d_month_seq between 1176 and 1176+11 - group by ss_store_sk, ss_item_sk) sc - where sb.ss_store_sk = sc.ss_store_sk and - sc.revenue <= 0.1 * sb.ave and - s_store_sk = sc.ss_store_sk and - i_item_sk = sc.ss_item_sk - order by s_store_name, i_item_desc -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy deleted file mode 100644 index 19cd030b878127..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query66.groovy +++ /dev/null @@ -1,477 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query66") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) 
as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship * 
ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as dec_net - from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 and 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* 
cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then cs_net_paid * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 AND 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - 
limit 100""" - qt_ds_shape_66 ''' - explain shape plan - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - ,sum(jan_sales) as jan_sales - ,sum(feb_sales) as feb_sales - ,sum(mar_sales) as mar_sales - ,sum(apr_sales) as apr_sales - ,sum(may_sales) as may_sales - ,sum(jun_sales) as jun_sales - ,sum(jul_sales) as jul_sales - ,sum(aug_sales) as aug_sales - ,sum(sep_sales) as sep_sales - ,sum(oct_sales) as oct_sales - ,sum(nov_sales) as nov_sales - ,sum(dec_sales) as dec_sales - ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot - ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot - ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot - ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot - ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot - ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot - ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot - ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot - ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot - ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot - ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot - ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot - ,sum(jan_net) as jan_net - ,sum(feb_net) as feb_net - ,sum(mar_net) as mar_net - ,sum(apr_net) as apr_net - ,sum(may_net) as may_net - ,sum(jun_net) as jun_net - ,sum(jul_net) as jul_net - ,sum(aug_net) as aug_net - ,sum(sep_net) as sep_net - ,sum(oct_net) as oct_net - ,sum(nov_net) as nov_net - ,sum(dec_net) as dec_net - from ( - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then ws_ext_sales_price* ws_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then ws_ext_sales_price* ws_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 
3 - then ws_ext_sales_price* ws_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then ws_ext_sales_price* ws_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then ws_ext_sales_price* ws_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then ws_ext_sales_price* ws_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then ws_ext_sales_price* ws_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then ws_ext_sales_price* ws_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then ws_ext_sales_price* ws_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then ws_ext_sales_price* ws_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then ws_ext_sales_price* ws_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then ws_ext_sales_price* ws_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then ws_net_paid_inc_ship * ws_quantity else 0 end) as dec_net - 
from - web_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - ws_warehouse_sk = w_warehouse_sk - and ws_sold_date_sk = d_date_sk - and ws_sold_time_sk = t_time_sk - and ws_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 and 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - union all - select - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,concat(concat('ORIENTAL ', ','), ' BOXBUNDLES') as ship_carriers - ,d_year as year - ,sum(case when d_moy = 1 - then cs_ext_list_price* cs_quantity else 0 end) as jan_sales - ,sum(case when d_moy = 2 - then cs_ext_list_price* cs_quantity else 0 end) as feb_sales - ,sum(case when d_moy = 3 - then cs_ext_list_price* cs_quantity else 0 end) as mar_sales - ,sum(case when d_moy = 4 - then cs_ext_list_price* cs_quantity else 0 end) as apr_sales - ,sum(case when d_moy = 5 - then cs_ext_list_price* cs_quantity else 0 end) as may_sales - ,sum(case when d_moy = 6 - then cs_ext_list_price* cs_quantity else 0 end) as jun_sales - ,sum(case when d_moy = 7 - then cs_ext_list_price* cs_quantity else 0 end) as jul_sales - ,sum(case when d_moy = 8 - then cs_ext_list_price* cs_quantity else 0 end) as aug_sales - ,sum(case when d_moy = 9 - then cs_ext_list_price* cs_quantity else 0 end) as sep_sales - ,sum(case when d_moy = 10 - then cs_ext_list_price* cs_quantity else 0 end) as oct_sales - ,sum(case when d_moy = 11 - then cs_ext_list_price* cs_quantity else 0 end) as nov_sales - ,sum(case when d_moy = 12 - then cs_ext_list_price* cs_quantity else 0 end) as dec_sales - ,sum(case when d_moy = 1 - then cs_net_paid * cs_quantity else 0 end) as jan_net - ,sum(case when d_moy = 2 - then cs_net_paid * cs_quantity else 0 end) as feb_net - ,sum(case when d_moy = 3 - then cs_net_paid * cs_quantity else 0 end) as mar_net - ,sum(case when d_moy = 4 - then 
cs_net_paid * cs_quantity else 0 end) as apr_net - ,sum(case when d_moy = 5 - then cs_net_paid * cs_quantity else 0 end) as may_net - ,sum(case when d_moy = 6 - then cs_net_paid * cs_quantity else 0 end) as jun_net - ,sum(case when d_moy = 7 - then cs_net_paid * cs_quantity else 0 end) as jul_net - ,sum(case when d_moy = 8 - then cs_net_paid * cs_quantity else 0 end) as aug_net - ,sum(case when d_moy = 9 - then cs_net_paid * cs_quantity else 0 end) as sep_net - ,sum(case when d_moy = 10 - then cs_net_paid * cs_quantity else 0 end) as oct_net - ,sum(case when d_moy = 11 - then cs_net_paid * cs_quantity else 0 end) as nov_net - ,sum(case when d_moy = 12 - then cs_net_paid * cs_quantity else 0 end) as dec_net - from - catalog_sales - ,warehouse - ,date_dim - ,time_dim - ,ship_mode - where - cs_warehouse_sk = w_warehouse_sk - and cs_sold_date_sk = d_date_sk - and cs_sold_time_sk = t_time_sk - and cs_ship_mode_sk = sm_ship_mode_sk - and d_year = 2001 - and t_time between 42970 AND 42970+28800 - and sm_carrier in ('ORIENTAL','BOXBUNDLES') - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,d_year - ) x - group by - w_warehouse_name - ,w_warehouse_sq_ft - ,w_city - ,w_county - ,w_state - ,w_country - ,ship_carriers - ,year - order by w_warehouse_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy deleted file mode 100644 index 881919b24ab14d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query67.groovy +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query67") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by 
i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100""" - qt_ds_shape_67 ''' - explain shape plan - select * -from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rank() over (partition by i_category order by sumsales desc) rk - from (select i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales - from store_sales - ,date_dim - ,store - ,item - where ss_sold_date_sk=d_date_sk - and ss_item_sk=i_item_sk - and ss_store_sk = s_store_sk - and d_month_seq between 1217 and 1217+11 - group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 -where rk <= 100 -order by i_category - ,i_class - ,i_brand - ,i_product_name - ,d_year - ,d_qoy - ,d_moy - ,s_store_id - ,sumsales - ,rk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy deleted file mode 100644 index 30e37515cbe938..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query68.groovy +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query68") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in 
('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100""" - qt_ds_shape_68 ''' - explain shape plan - select c_last_name - ,c_first_name - ,ca_city - ,bought_city - ,ss_ticket_number - ,extended_price - ,extended_tax - ,list_price - from (select ss_ticket_number - ,ss_customer_sk - ,ca_city bought_city - ,sum(ss_ext_sales_price) extended_price - ,sum(ss_ext_list_price) list_price - ,sum(ss_ext_tax) extended_tax - from store_sales - ,date_dim - ,store - ,household_demographics - ,customer_address - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and store_sales.ss_addr_sk = customer_address.ca_address_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_dep_count = 3 or - household_demographics.hd_vehicle_count= 4) - and date_dim.d_year in (1998,1998+1,1998+2) - and store.s_city in ('Fairview','Midway') - group by ss_ticket_number - ,ss_customer_sk - ,ss_addr_sk,ca_city) dn - ,customer - ,customer_address current_addr - where ss_customer_sk = c_customer_sk - and customer.c_current_addr_sk = current_addr.ca_address_sk - and current_addr.ca_city <> bought_city - order by c_last_name - ,ss_ticket_number - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy deleted file mode 100644 index 09cbd0185b150d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query69.groovy +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query69") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('IL','TX','ME') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - (not exists (select * 
- from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100""" - qt_ds_shape_69 ''' - explain shape plan - select - cd_gender, - cd_marital_status, - cd_education_status, - count(*) cnt1, - cd_purchase_estimate, - count(*) cnt2, - cd_credit_rating, - count(*) cnt3 - from - customer c,customer_address ca,customer_demographics - where - c.c_current_addr_sk = ca.ca_address_sk and - ca_state in ('IL','TX','ME') and - cd_demo_sk = c.c_current_cdemo_sk and - exists (select * - from store_sales,date_dim - where c.c_customer_sk = ss_customer_sk and - ss_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - (not exists (select * - from web_sales,date_dim - where c.c_customer_sk = ws_bill_customer_sk and - ws_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2) and - not exists (select * - from catalog_sales,date_dim - where c.c_customer_sk = cs_ship_customer_sk and - cs_sold_date_sk = d_date_sk and - d_year = 2002 and - d_moy between 1 and 1+2)) - group by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - order by cd_gender, - cd_marital_status, - cd_education_status, - cd_purchase_estimate, - cd_credit_rating - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy deleted file mode 100644 index b018fd1ff86677..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query7.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - 
cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100""" - qt_ds_shape_7 ''' - explain shape plan - select i_item_id, - avg(ss_quantity) agg1, - avg(ss_list_price) agg2, - avg(ss_coupon_amt) agg3, - avg(ss_sales_price) agg4 - from store_sales, customer_demographics, date_dim, item, promotion - where ss_sold_date_sk = d_date_sk and - ss_item_sk = i_item_sk and - ss_cdemo_sk = cd_demo_sk and - ss_promo_sk = p_promo_sk and - cd_gender = 'F' and - cd_marital_status = 'W' and - cd_education_status = 'College' and - (p_channel_email = 'N' or p_channel_event = 'N') and - d_year = 2001 - group by i_item_id - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy deleted file mode 100644 index 764f9bd2483034..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query70.groovy +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query70") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1220 and 1220+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1220 and 1220+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100""" - qt_ds_shape_70 ''' - explain shape plan - select - sum(ss_net_profit) as total_sum - ,s_state - ,s_county - ,grouping(s_state)+grouping(s_county) as lochierarchy - ,rank() over ( - partition by grouping(s_state)+grouping(s_county), - case when grouping(s_county) = 0 then s_state end - order by 
sum(ss_net_profit) desc) as rank_within_parent - from - store_sales - ,date_dim d1 - ,store - where - d1.d_month_seq between 1220 and 1220+11 - and d1.d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - and s_state in - ( select s_state - from (select s_state as s_state, - rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking - from store_sales, store, date_dim - where d_month_seq between 1220 and 1220+11 - and d_date_sk = ss_sold_date_sk - and s_store_sk = ss_store_sk - group by s_state - ) tmp1 - where ranking <= 5 - ) - group by rollup(s_state,s_county) - order by - lochierarchy desc - ,case when lochierarchy = 0 then s_state end - ,rank_within_parent - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy deleted file mode 100644 index 409be785e48011..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query71.groovy +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query71") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=2002 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - """ - qt_ds_shape_71 ''' - explain shape plan - select i_brand_id brand_id, i_brand brand,t_hour,t_minute, - sum(ext_price) ext_price - from item, (select ws_ext_sales_price as ext_price, - 
ws_sold_date_sk as sold_date_sk, - ws_item_sk as sold_item_sk, - ws_sold_time_sk as time_sk - from web_sales,date_dim - where d_date_sk = ws_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select cs_ext_sales_price as ext_price, - cs_sold_date_sk as sold_date_sk, - cs_item_sk as sold_item_sk, - cs_sold_time_sk as time_sk - from catalog_sales,date_dim - where d_date_sk = cs_sold_date_sk - and d_moy=12 - and d_year=2002 - union all - select ss_ext_sales_price as ext_price, - ss_sold_date_sk as sold_date_sk, - ss_item_sk as sold_item_sk, - ss_sold_time_sk as time_sk - from store_sales,date_dim - where d_date_sk = ss_sold_date_sk - and d_moy=12 - and d_year=2002 - ) tmp,time_dim - where - sold_item_sk = i_item_sk - and i_manager_id=1 - and time_sk = t_time_sk - and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') - group by i_brand, i_brand_id,t_hour,t_minute - order by ext_price desc, i_brand_id - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy deleted file mode 100644 index 5880246f558a34..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query72.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query72") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100""" 
- qt_ds_shape_72 ''' - explain shape plan - select i_item_desc - ,w_warehouse_name - ,d1.d_week_seq - ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo - ,sum(case when p_promo_sk is not null then 1 else 0 end) promo - ,count(*) total_cnt -from catalog_sales -join inventory on (cs_item_sk = inv_item_sk) -join warehouse on (w_warehouse_sk=inv_warehouse_sk) -join item on (i_item_sk = cs_item_sk) -join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) -join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) -join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) -join date_dim d2 on (inv_date_sk = d2.d_date_sk) -join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) -left outer join promotion on (cs_promo_sk=p_promo_sk) -left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) -where d1.d_week_seq = d2.d_week_seq - and inv_quantity_on_hand < cs_quantity - and (d3.d_date > (d1.d_date + INTERVAL '5' DAY)) - and hd_buy_potential = '1001-5000' - and d1.d_year = 1998 - and cd_marital_status = 'S' -group by i_item_desc,w_warehouse_name,d1.d_week_seq -order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy deleted file mode 100644 index fbd8df6bec621e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query73.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query73") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '5001-10000') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in 
('Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc""" - qt_ds_shape_73 ''' - explain shape plan - select c_last_name - ,c_first_name - ,c_salutation - ,c_preferred_cust_flag - ,ss_ticket_number - ,cnt from - (select ss_ticket_number - ,ss_customer_sk - ,count(*) cnt - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and date_dim.d_dom between 1 and 2 - and (household_demographics.hd_buy_potential = '1001-5000' or - household_demographics.hd_buy_potential = '5001-10000') - and household_demographics.hd_vehicle_count > 0 - and case when household_demographics.hd_vehicle_count > 0 then - household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_county in ('Williamson County','Williamson County','Williamson County','Williamson County') - group by ss_ticket_number,ss_customer_sk) dj,customer - where ss_customer_sk = c_customer_sk - and cnt between 1 and 5 - order by cnt desc, c_last_name asc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy deleted file mode 100644 index 90580c2e6b756b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query74.groovy +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query74") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - 
and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 1,3,2 -limit 100""" - qt_ds_shape_74 ''' - explain shape plan - with year_total as ( - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ss_net_paid) year_total - ,'s' sale_type - from customer - ,store_sales - ,date_dim - where c_customer_sk = ss_customer_sk - and ss_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - union all - select c_customer_id customer_id - ,c_first_name customer_first_name - ,c_last_name customer_last_name - ,d_year as year - ,max(ws_net_paid) year_total - ,'w' sale_type - from customer - ,web_sales - ,date_dim - where c_customer_sk = ws_bill_customer_sk - and ws_sold_date_sk = d_date_sk - and d_year in (1999,1999+1) - group by c_customer_id - ,c_first_name - ,c_last_name - ,d_year - ) - select - 
t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name - from year_total t_s_firstyear - ,year_total t_s_secyear - ,year_total t_w_firstyear - ,year_total t_w_secyear - where t_s_secyear.customer_id = t_s_firstyear.customer_id - and t_s_firstyear.customer_id = t_w_secyear.customer_id - and t_s_firstyear.customer_id = t_w_firstyear.customer_id - and t_s_firstyear.sale_type = 's' - and t_w_firstyear.sale_type = 'w' - and t_s_secyear.sale_type = 's' - and t_w_secyear.sale_type = 'w' - and t_s_firstyear.year = 1999 - and t_s_secyear.year = 1999+1 - and t_w_firstyear.year = 1999 - and t_w_secyear.year = 1999+1 - and t_s_firstyear.year_total > 0 - and t_w_firstyear.year_total > 0 - and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end - > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end - order by 1,3,2 -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy deleted file mode 100644 index 775798604b2ee8..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query75.groovy +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query75") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON 
i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Sports') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=2002 - AND prev_yr.d_year=2002-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100""" - qt_ds_shape_75 ''' - explain shape plan - WITH all_sales AS ( - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,SUM(sales_cnt) AS sales_cnt - ,SUM(sales_amt) AS sales_amt - FROM (SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt - ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt - FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk - JOIN date_dim ON d_date_sk=cs_sold_date_sk - 
LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number - AND cs_item_sk=cr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt - ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt - FROM store_sales JOIN item ON i_item_sk=ss_item_sk - JOIN date_dim ON d_date_sk=ss_sold_date_sk - LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number - AND ss_item_sk=sr_item_sk) - WHERE i_category='Sports' - UNION - SELECT d_year - ,i_brand_id - ,i_class_id - ,i_category_id - ,i_manufact_id - ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt - ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt - FROM web_sales JOIN item ON i_item_sk=ws_item_sk - JOIN date_dim ON d_date_sk=ws_sold_date_sk - LEFT JOIN web_returns ON (ws_order_number=wr_order_number - AND ws_item_sk=wr_item_sk) - WHERE i_category='Sports') sales_detail - GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) - SELECT prev_yr.d_year AS prev_year - ,curr_yr.d_year AS year - ,curr_yr.i_brand_id - ,curr_yr.i_class_id - ,curr_yr.i_category_id - ,curr_yr.i_manufact_id - ,prev_yr.sales_cnt AS prev_yr_cnt - ,curr_yr.sales_cnt AS curr_yr_cnt - ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff - ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff - FROM all_sales curr_yr, all_sales prev_yr - WHERE curr_yr.i_brand_id=prev_yr.i_brand_id - AND curr_yr.i_class_id=prev_yr.i_class_id - AND curr_yr.i_category_id=prev_yr.i_category_id - AND curr_yr.i_manufact_id=prev_yr.i_manufact_id - AND curr_yr.d_year=2002 - AND prev_yr.d_year=2002-1 - AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 - ORDER BY sales_cnt_diff,sales_amt_diff - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy deleted file mode 100644 index 4937f060c5e736..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query76.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query76") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_customer_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_promo_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_promo_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_bill_customer_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_bill_customer_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100""" - qt_ds_shape_76 ''' - explain shape plan - select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( - SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price 
ext_sales_price - FROM store_sales, item, date_dim - WHERE ss_customer_sk IS NULL - AND ss_sold_date_sk=d_date_sk - AND ss_item_sk=i_item_sk - UNION ALL - SELECT 'web' as channel, 'ws_promo_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price - FROM web_sales, item, date_dim - WHERE ws_promo_sk IS NULL - AND ws_sold_date_sk=d_date_sk - AND ws_item_sk=i_item_sk - UNION ALL - SELECT 'catalog' as channel, 'cs_bill_customer_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price - FROM catalog_sales, item, date_dim - WHERE cs_bill_customer_sk IS NULL - AND cs_sold_date_sk=d_date_sk - AND cs_item_sk=i_item_sk) foo -GROUP BY channel, col_name, d_year, d_qoy, i_category -ORDER BY channel, col_name, d_year, d_qoy, i_category -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy deleted file mode 100644 index b188b1982d560c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query77.groovy +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query77") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - 
group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_77 ''' - explain shape plan - with ss as - (select s_store_sk, - sum(ss_ext_sales_price) as sales, - sum(ss_net_profit) as profit - from store_sales, - date_dim, - store - where ss_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ss_store_sk = s_store_sk - group by s_store_sk) - , - sr as - (select s_store_sk, - sum(sr_return_amt) as returns, - sum(sr_net_loss) as profit_loss - from 
store_returns, - date_dim, - store - where sr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and sr_store_sk = s_store_sk - group by s_store_sk), - cs as - (select cs_call_center_sk, - sum(cs_ext_sales_price) as sales, - sum(cs_net_profit) as profit - from catalog_sales, - date_dim - where cs_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cs_call_center_sk - ), - cr as - (select cr_call_center_sk, - sum(cr_return_amount) as returns, - sum(cr_net_loss) as profit_loss - from catalog_returns, - date_dim - where cr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - group by cr_call_center_sk - ), - ws as - ( select wp_web_page_sk, - sum(ws_ext_sales_price) as sales, - sum(ws_net_profit) as profit - from web_sales, - date_dim, - web_page - where ws_sold_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and ws_web_page_sk = wp_web_page_sk - group by wp_web_page_sk), - wr as - (select wp_web_page_sk, - sum(wr_return_amt) as returns, - sum(wr_net_loss) as profit_loss - from web_returns, - date_dim, - web_page - where wr_returned_date_sk = d_date_sk - and d_date between cast('2000-08-10' as date) - and (cast('2000-08-10' as date) + interval 30 day) - and wr_web_page_sk = wp_web_page_sk - group by wp_web_page_sk) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , ss.s_store_sk as id - , sales - , coalesce(returns, 0) as returns - , (profit - coalesce(profit_loss,0)) as profit - from ss left join sr - on ss.s_store_sk = sr.s_store_sk - union all - select 'catalog channel' as channel - , cs_call_center_sk as id - , sales - , returns - , (profit - 
profit_loss) as profit - from cs - , cr - union all - select 'web channel' as channel - , ws.wp_web_page_sk as id - , sales - , coalesce(returns, 0) returns - , (profit - coalesce(profit_loss,0)) as profit - from ws left join wr - on ws.wp_web_page_sk = wr.wp_web_page_sk - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy deleted file mode 100644 index b96778997d3d48..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query78.groovy +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query78") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, 
ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100""" - qt_ds_shape_78 ''' - explain shape plan - with ws as - (select d_year AS ws_sold_year, ws_item_sk, - ws_bill_customer_sk ws_customer_sk, - sum(ws_quantity) ws_qty, - sum(ws_wholesale_cost) ws_wc, - sum(ws_sales_price) ws_sp - from web_sales - left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk - join date_dim on ws_sold_date_sk = d_date_sk - where wr_order_number is null and d_year=1998 - group by d_year, ws_item_sk, ws_bill_customer_sk - ), -cs as - (select d_year AS cs_sold_year, cs_item_sk, - cs_bill_customer_sk cs_customer_sk, - sum(cs_quantity) cs_qty, - sum(cs_wholesale_cost) cs_wc, - sum(cs_sales_price) cs_sp - from catalog_sales - left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk - join date_dim on cs_sold_date_sk = d_date_sk - where cr_order_number is null and d_year=1998 - group by d_year, cs_item_sk, cs_bill_customer_sk - ), -ss as - (select d_year AS ss_sold_year, ss_item_sk, - ss_customer_sk, - sum(ss_quantity) ss_qty, - sum(ss_wholesale_cost) ss_wc, - sum(ss_sales_price) ss_sp - from store_sales - left join store_returns on sr_ticket_number=ss_ticket_number and 
ss_item_sk=sr_item_sk - join date_dim on ss_sold_date_sk = d_date_sk - where sr_ticket_number is null and d_year=1998 - group by d_year, ss_item_sk, ss_customer_sk - ) -select -ss_customer_sk, -round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, -ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, -coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, -coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, -coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price -from ss -left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) -left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) -where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=1998 -order by - ss_customer_sk, - ss_qty desc, ss_wc desc, ss_sp desc, - other_chan_qty, - other_chan_wholesale_cost, - other_chan_sales_price, - ratio -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy deleted file mode 100644 index dda5a1cfd58448..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query79.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query79") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 7 or household_demographics.hd_vehicle_count > -1) - and date_dim.d_dow = 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit 
-limit 100""" - qt_ds_shape_79 ''' - explain shape plan - select - c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit - from - (select ss_ticket_number - ,ss_customer_sk - ,store.s_city - ,sum(ss_coupon_amt) amt - ,sum(ss_net_profit) profit - from store_sales,date_dim,store,household_demographics - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_store_sk = store.s_store_sk - and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk - and (household_demographics.hd_dep_count = 7 or household_demographics.hd_vehicle_count > -1) - and date_dim.d_dow = 1 - and date_dim.d_year in (2000,2000+1,2000+2) - and store.s_number_employees between 200 and 295 - group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer - where ss_customer_sk = c_customer_sk - order by c_last_name,c_first_name,substr(s_city,1,30), profit -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy deleted file mode 100644 index 11f5a76e10d94e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query8.groovy +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - '91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - 
'81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - '59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - 
'55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100""" - qt_ds_shape_8 ''' - explain shape plan - select s_store_name - ,sum(ss_net_profit) - from store_sales - ,date_dim - ,store, - (select ca_zip - from ( - SELECT substr(ca_zip,1,5) ca_zip - FROM customer_address - WHERE substr(ca_zip,1,5) IN ( - '47602','16704','35863','28577','83910','36201', - '58412','48162','28055','41419','80332', - '38607','77817','24891','16226','18410', - '21231','59345','13918','51089','20317', - '17167','54585','67881','78366','47770', - '18360','51717','73108','14440','21800', - '89338','45859','65501','34948','25973', - '73219','25333','17291','10374','18829', - '60736','82620','41351','52094','19326', - '25214','54207','40936','21814','79077', - '25178','75742','77454','30621','89193', - '27369','41232','48567','83041','71948', - '37119','68341','14073','16891','62878', - '49130','19833','24286','27700','40979', - '50412','81504','94835','84844','71954', - '39503','57649','18434','24987','12350', - '86379','27413','44529','98569','16515', - '27287','24255','21094','16005','56436', - 
'91110','68293','56455','54558','10298', - '83647','32754','27052','51766','19444', - '13869','45645','94791','57631','20712', - '37788','41807','46507','21727','71836', - '81070','50632','88086','63991','20244', - '31655','51782','29818','63792','68605', - '94898','36430','57025','20601','82080', - '33869','22728','35834','29086','92645', - '98584','98072','11652','78093','57553', - '43830','71144','53565','18700','90209', - '71256','38353','54364','28571','96560', - '57839','56355','50679','45266','84680', - '34306','34972','48530','30106','15371', - '92380','84247','92292','68852','13338', - '34594','82602','70073','98069','85066', - '47289','11686','98862','26217','47529', - '63294','51793','35926','24227','14196', - '24594','32489','99060','49472','43432', - '49211','14312','88137','47369','56877', - '20534','81755','15794','12318','21060', - '73134','41255','63073','81003','73873', - '66057','51184','51195','45676','92696', - '70450','90669','98338','25264','38919', - '59226','58581','60298','17895','19489', - '52301','80846','95464','68770','51634', - '19988','18367','18421','11618','67975', - '25494','41352','95430','15734','62585', - '97173','33773','10425','75675','53535', - '17879','41967','12197','67998','79658', - '59130','72592','14851','43933','68101', - '50636','25717','71286','24660','58058', - '72991','95042','15543','33122','69280', - '11912','59386','27642','65177','17672', - '33467','64592','36335','54010','18767', - '63193','42361','49254','33113','33159', - '36479','59080','11855','81963','31016', - '49140','29392','41836','32958','53163', - '13844','73146','23952','65148','93498', - '14530','46131','58454','13376','13378', - '83986','12320','17193','59852','46081', - '98533','52389','13086','68843','31013', - '13261','60560','13443','45533','83583', - '11489','58218','19753','22911','25115', - '86709','27156','32669','13123','51933', - '39214','41331','66943','14155','69998', - '49101','70070','35076','14242','73021', - 
'59494','15782','29752','37914','74686', - '83086','34473','15751','81084','49230', - '91894','60624','17819','28810','63180', - '56224','39459','55233','75752','43639', - '55349','86057','62361','50788','31830', - '58062','18218','85761','60083','45484', - '21204','90229','70041','41162','35390', - '16364','39500','68908','26689','52868', - '81335','40146','11340','61527','61794', - '71997','30415','59004','29450','58117', - '69952','33562','83833','27385','61860', - '96435','48333','23065','32961','84919', - '61997','99132','22815','56600','68730', - '48017','95694','32919','88217','27116', - '28239','58032','18884','16791','21343', - '97462','18569','75660','15475') - intersect - select ca_zip - from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt - FROM customer_address, customer - WHERE ca_address_sk = c_current_addr_sk and - c_preferred_cust_flag='Y' - group by ca_zip - having count(*) > 10)A1)A2) V1 - where ss_store_sk = s_store_sk - and ss_sold_date_sk = d_date_sk - and d_qoy = 2 and d_year = 1998 - and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) - group by s_store_name - order by s_store_name - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy deleted file mode 100644 index f9e62e637081f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query80.groovy +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query80") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - 
coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100""" - qt_ds_shape_80 ''' - explain shape plan - with ssr as - (select s_store_id as store_id, - sum(ss_ext_sales_price) as sales, - sum(coalesce(sr_return_amt, 0)) as returns, - sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit - from 
store_sales left outer join store_returns on - (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), - date_dim, - store, - item, - promotion - where ss_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ss_store_sk = s_store_sk - and ss_item_sk = i_item_sk - and i_current_price > 50 - and ss_promo_sk = p_promo_sk - and p_channel_tv = 'N' - group by s_store_id) - , - csr as - (select cp_catalog_page_id as catalog_page_id, - sum(cs_ext_sales_price) as sales, - sum(coalesce(cr_return_amount, 0)) as returns, - sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit - from catalog_sales left outer join catalog_returns on - (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), - date_dim, - catalog_page, - item, - promotion - where cs_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and cs_catalog_page_sk = cp_catalog_page_sk - and cs_item_sk = i_item_sk - and i_current_price > 50 - and cs_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by cp_catalog_page_id) - , - wsr as - (select web_site_id, - sum(ws_ext_sales_price) as sales, - sum(coalesce(wr_return_amt, 0)) as returns, - sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit - from web_sales left outer join web_returns on - (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), - date_dim, - web_site, - item, - promotion - where ws_sold_date_sk = d_date_sk - and d_date between cast('2002-08-14' as date) - and (cast('2002-08-14' as date) + interval 30 day) - and ws_web_site_sk = web_site_sk - and ws_item_sk = i_item_sk - and i_current_price > 50 - and ws_promo_sk = p_promo_sk - and p_channel_tv = 'N' -group by web_site_id) - select channel - , id - , sum(sales) as sales - , sum(returns) as returns - , sum(profit) as profit - from - (select 'store channel' as channel - , concat('store', store_id) as id - , sales - , 
returns - , profit - from ssr - union all - select 'catalog channel' as channel - , concat('catalog_page', catalog_page_id) as id - , sales - , returns - , profit - from csr - union all - select 'web channel' as channel - , concat('web_site', web_site_id) as id - , sales - , returns - , profit - from wsr - ) x - group by rollup (channel, id) - order by channel - ,id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy deleted file mode 100644 index 0f39073baf4b47..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query81.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query81") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with customer_total_return as - (select cr_returning_customer_sk as ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2001 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'TN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100""" - qt_ds_shape_81 ''' - explain shape plan - with customer_total_return as - (select cr_returning_customer_sk as 
ctr_customer_sk - ,ca_state as ctr_state, - sum(cr_return_amt_inc_tax) as ctr_total_return - from catalog_returns - ,date_dim - ,customer_address - where cr_returned_date_sk = d_date_sk - and d_year =2001 - and cr_returning_addr_sk = ca_address_sk - group by cr_returning_customer_sk - ,ca_state ) - select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - from customer_total_return ctr1 - ,customer_address - ,customer - where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 - from customer_total_return ctr2 - where ctr1.ctr_state = ctr2.ctr_state) - and ca_address_sk = c_current_addr_sk - and ca_state = 'TN' - and ctr1.ctr_customer_sk = c_customer_sk - order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name - ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset - ,ca_location_type,ctr_total_return - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy deleted file mode 100644 index 6f3260b90f1206..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query82.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query82") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 58 and 58+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2001-01-13' as date) and (cast('2001-01-13' as date) + interval 60 day) - and i_manufact_id in (259,559,580,485) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100""" - qt_ds_shape_82 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_current_price - from item, inventory, date_dim, store_sales - where i_current_price between 58 and 58+30 - and inv_item_sk = i_item_sk - and d_date_sk=inv_date_sk - and d_date between cast('2001-01-13' as date) and (cast('2001-01-13' as date) + interval 60 
day) - and i_manufact_id in (259,559,580,485) - and inv_quantity_on_hand between 100 and 500 - and ss_item_sk = i_item_sk - group by i_item_id,i_item_desc,i_current_price - order by i_item_id - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy deleted file mode 100644 index da47ab88acbd69..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query83.groovy +++ /dev/null @@ -1,171 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query83") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - 
,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100""" - qt_ds_shape_83 ''' - explain shape plan - with sr_items as - (select i_item_id item_id, - sum(sr_return_quantity) sr_item_qty - from store_returns, - item, - date_dim - where sr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and sr_returned_date_sk = d_date_sk - group by i_item_id), - cr_items as - (select i_item_id item_id, - sum(cr_return_quantity) cr_item_qty - from catalog_returns, - item, - date_dim - where cr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and cr_returned_date_sk = d_date_sk - group by i_item_id), - wr_items as - (select i_item_id item_id, - sum(wr_return_quantity) wr_item_qty - from web_returns, - item, - date_dim - where wr_item_sk = i_item_sk - and d_date in - (select d_date - from date_dim - where d_week_seq in - (select d_week_seq - from date_dim - where d_date in ('2001-07-13','2001-09-10','2001-11-16'))) - and wr_returned_date_sk = d_date_sk - group by i_item_id) - select sr_items.item_id - ,sr_item_qty - ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev - ,cr_item_qty - ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev - ,wr_item_qty - ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev - ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average - from sr_items - ,cr_items - ,wr_items - where 
sr_items.item_id=cr_items.item_id - and sr_items.item_id=wr_items.item_id - order by sr_items.item_id - ,sr_item_qty - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy deleted file mode 100644 index 98b9cea582ad5c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query84.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query84") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Woodland' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 60306 - and ib_upper_bound <= 60306 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100""" - qt_ds_shape_84 ''' - explain shape plan - select c_customer_id as customer_id - , concat(concat(coalesce(c_last_name,''), ','), coalesce(c_first_name,'')) as customername - from customer - ,customer_address - ,customer_demographics - ,household_demographics - ,income_band - ,store_returns - where ca_city = 'Woodland' - and c_current_addr_sk = ca_address_sk - and ib_lower_bound >= 60306 - and ib_upper_bound <= 60306 + 50000 - and ib_income_band_sk = hd_income_band_sk - and cd_demo_sk = c_current_cdemo_sk - and hd_demo_sk = c_current_hdemo_sk - and sr_cdemo_sk = cd_demo_sk - order by c_customer_id - limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy deleted file mode 100644 index 8add0348dae1fd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query85.groovy +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query85") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 1998 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'D' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Primary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'College' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'U' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - 
ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('NC', 'TX', 'IA') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('WI', 'WV', 'GA') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('OK', 'VA', 'KY') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100""" - qt_ds_shape_85 ''' - explain shape plan - select substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) - from web_sales, web_returns, web_page, customer_demographics cd1, - customer_demographics cd2, customer_address, date_dim, reason - where ws_web_page_sk = wp_web_page_sk - and ws_item_sk = wr_item_sk - and ws_order_number = wr_order_number - and ws_sold_date_sk = d_date_sk and d_year = 1998 - and cd1.cd_demo_sk = wr_refunded_cdemo_sk - and cd2.cd_demo_sk = wr_returning_cdemo_sk - and ca_address_sk = wr_refunded_addr_sk - and r_reason_sk = wr_reason_sk - and - ( - ( - cd1.cd_marital_status = 'D' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Primary' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 100.00 and 150.00 - ) - or - ( - cd1.cd_marital_status = 'S' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'College' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 50.00 and 100.00 - ) - or - ( - cd1.cd_marital_status = 'U' - and - cd1.cd_marital_status = cd2.cd_marital_status - and - cd1.cd_education_status = 'Advanced Degree' - and - cd1.cd_education_status = cd2.cd_education_status - and - ws_sales_price between 150.00 and 200.00 - ) - ) - and - ( - ( - ca_country = 'United States' - and - ca_state in ('NC', 'TX', 
'IA') - and ws_net_profit between 100 and 200 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('WI', 'WV', 'GA') - and ws_net_profit between 150 and 300 - ) - or - ( - ca_country = 'United States' - and - ca_state in ('OK', 'VA', 'KY') - and ws_net_profit between 50 and 250 - ) - ) -group by r_reason_desc -order by substr(r_reason_desc,1,20) - ,avg(ws_quantity) - ,avg(wr_refunded_cash) - ,avg(wr_fee) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy deleted file mode 100644 index e5247041ec8897..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query86.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query86") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1186 and 1186+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent - limit 100""" - qt_ds_shape_86 ''' - explain shape plan - select - sum(ws_net_paid) as total_sum - ,i_category - ,i_class - ,grouping(i_category)+grouping(i_class) as lochierarchy - ,rank() over ( - partition by grouping(i_category)+grouping(i_class), - case when grouping(i_class) = 0 then i_category end - order by sum(ws_net_paid) desc) as rank_within_parent - from - web_sales - ,date_dim d1 - ,item - where - d1.d_month_seq between 1186 and 1186+11 - and d1.d_date_sk = ws_sold_date_sk - and i_item_sk = ws_item_sk - group by rollup(i_category,i_class) - order by - lochierarchy desc, - case when lochierarchy = 0 then i_category end, - rank_within_parent 
- limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy deleted file mode 100644 index b082d2bdbc1299..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query87.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query87") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) -) cool_cust -""" - qt_ds_shape_87 ''' - explain shape plan - select count(*) -from ((select distinct c_last_name, c_first_name, d_date - from store_sales, date_dim, customer - where store_sales.ss_sold_date_sk = date_dim.d_date_sk - and store_sales.ss_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from catalog_sales, date_dim, customer - where 
catalog_sales.cs_sold_date_sk = date_dim.d_date_sk - and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) - except - (select distinct c_last_name, c_first_name, d_date - from web_sales, date_dim, customer - where web_sales.ws_sold_date_sk = date_dim.d_date_sk - and web_sales.ws_bill_customer_sk = customer.c_customer_sk - and d_month_seq between 1202 and 1202+11) -) cool_cust - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy deleted file mode 100644 index 0b9c4c7c7d2de6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query88.groovy +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query88") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - 
from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and 
ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 -""" - qt_ds_shape_88 ''' - explain shape plan - select * -from - (select count(*) h8_30_to_9 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and 
((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s1, - (select count(*) h9_to_9_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s2, - (select count(*) h9_30_to_10 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 9 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s3, - (select count(*) h10_to_10_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and 
household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s4, - (select count(*) h10_30_to_11 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 10 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s5, - (select count(*) h11_to_11_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s6, - (select count(*) h11_30_to_12 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 11 - and time_dim.t_minute >= 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and 
store.s_store_name = 'ese') s7, - (select count(*) h12_to_12_30 - from store_sales, household_demographics , time_dim, store - where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 12 - and time_dim.t_minute < 30 - and ((household_demographics.hd_dep_count = 0 and household_demographics.hd_vehicle_count<=0+2) or - (household_demographics.hd_dep_count = -1 and household_demographics.hd_vehicle_count<=-1+2) or - (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) - and store.s_store_name = 'ese') s8 - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy deleted file mode 100644 index dd5752b43fa4f0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query89.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query89") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (2001) and - ((i_category in ('Books','Children','Electronics') and - i_class in ('history','school-uniforms','audio') - ) - or (i_category in ('Men','Sports','Shoes') and - i_class in ('pants','tennis','womens') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100""" - qt_ds_shape_89 ''' - explain shape plan - select * -from( -select i_category, i_class, i_brand, - s_store_name, s_company_name, - d_moy, - sum(ss_sales_price) sum_sales, - avg(sum(ss_sales_price)) over - (partition by i_category, i_brand, s_store_name, s_company_name) - avg_monthly_sales -from item, store_sales, date_dim, store -where ss_item_sk = i_item_sk and - 
ss_sold_date_sk = d_date_sk and - ss_store_sk = s_store_sk and - d_year in (2001) and - ((i_category in ('Books','Children','Electronics') and - i_class in ('history','school-uniforms','audio') - ) - or (i_category in ('Men','Sports','Shoes') and - i_class in ('pants','tennis','womens') - )) -group by i_category, i_class, i_brand, - s_store_name, s_company_name, d_moy) tmp1 -where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 -order by sum_sales - avg_monthly_sales, s_store_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy deleted file mode 100644 index d72b880214bcc9..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query9.groovy +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - sql "set enable_parallel_result_sink=false;" - - def ds = """select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 1071 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 39161 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 29434 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 6568 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from 
store_sales - where ss_quantity between 81 and 100) > 21216 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 -""" - qt_ds_shape_9 ''' - explain shape plan - select case when (select count(*) - from store_sales - where ss_quantity between 1 and 20) > 1071 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 1 and 20) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 1 and 20) end bucket1 , - case when (select count(*) - from store_sales - where ss_quantity between 21 and 40) > 39161 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 21 and 40) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 21 and 40) end bucket2, - case when (select count(*) - from store_sales - where ss_quantity between 41 and 60) > 29434 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 41 and 60) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 41 and 60) end bucket3, - case when (select count(*) - from store_sales - where ss_quantity between 61 and 80) > 6568 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 61 and 80) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 61 and 80) end bucket4, - case when (select count(*) - from store_sales - where ss_quantity between 81 and 100) > 21216 - then (select avg(ss_ext_tax) - from store_sales - where ss_quantity between 81 and 100) - else (select avg(ss_net_paid_inc_tax) - from store_sales - where ss_quantity between 81 and 100) end bucket5 -from reason -where r_reason_sk = 1 - - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy deleted file mode 100644 index 9bce112a5c2d07..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query90.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query90") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 12 and 12+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 14 and 14+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100""" - qt_ds_shape_90 ''' - explain shape plan - select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio - from ( select count(*) amc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 
12 and 12+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) at, - ( select count(*) pmc - from web_sales, household_demographics , time_dim, web_page - where ws_sold_time_sk = time_dim.t_time_sk - and ws_ship_hdemo_sk = household_demographics.hd_demo_sk - and ws_web_page_sk = web_page.wp_web_page_sk - and time_dim.t_hour between 14 and 14+1 - and household_demographics.hd_dep_count = 6 - and web_page.wp_char_count between 5000 and 5200) pt - order by am_pm_ratio - limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy deleted file mode 100644 index e250435336edfa..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query91.groovy +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query91") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc""" - qt_ds_shape_91 ''' - explain shape plan - select - cc_call_center_id Call_Center, - cc_name Call_Center_Name, - cc_manager Manager, - sum(cr_net_loss) Returns_Loss -from - call_center, - catalog_returns, - date_dim, - customer, - customer_address, - customer_demographics, - household_demographics -where - cr_call_center_sk = cc_call_center_sk -and cr_returned_date_sk = d_date_sk -and 
cr_returning_customer_sk= c_customer_sk -and cd_demo_sk = c_current_cdemo_sk -and hd_demo_sk = c_current_hdemo_sk -and ca_address_sk = c_current_addr_sk -and d_year = 2000 -and d_moy = 12 -and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') - or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) -and hd_buy_potential like 'Unknown%' -and ca_gmt_offset = -7 -group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status -order by sum(cr_net_loss) desc - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy deleted file mode 100644 index 5ec81ae0610cae..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query92.groovy +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query92") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 714 -and i_item_sk = ws_item_sk -and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100""" - qt_ds_shape_92 ''' - explain shape plan - select - sum(ws_ext_discount_amt) as "Excess Discount Amount" -from - web_sales - ,item - ,date_dim -where -i_manufact_id = 714 -and i_item_sk = ws_item_sk -and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) -and d_date_sk = ws_sold_date_sk -and ws_ext_discount_amt - > ( - SELECT - 1.3 * avg(ws_ext_discount_amt) - FROM - web_sales - ,date_dim - WHERE - ws_item_sk = i_item_sk - and d_date between '2000-02-01' and - (cast('2000-02-01' as date) + interval 90 day) - and d_date_sk = ws_sold_date_sk - ) -order by sum(ws_ext_discount_amt) -limit 100 - ''' -} diff --git 
a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy deleted file mode 100644 index f0f52194f2d4f6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query93.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query93") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'reason 58') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100""" - qt_ds_shape_93 ''' - explain shape plan - select ss_customer_sk - ,sum(act_sales) sumsales - from (select ss_item_sk - ,ss_ticket_number - ,ss_customer_sk - ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price - else (ss_quantity*ss_sales_price) end act_sales - from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk - and sr_ticket_number = ss_ticket_number) - ,reason - where sr_reason_sk = r_reason_sk - and r_reason_desc = 'reason 58') t - group by ss_customer_sk - order by sumsales, ss_customer_sk -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy 
b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy deleted file mode 100644 index cee27ca765ab57..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query94.groovy +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query94") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2002-5-01' and - (cast('2002-5-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_94 ''' - explain shape plan - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2002-5-01' and - (cast('2002-5-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'OK' -and ws1.ws_web_site_sk = web_site_sk -and 
web_company_name = 'pri' -and exists (select * - from web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) -and not exists(select * - from web_returns wr1 - where ws1.ws_order_number = wr1.wr_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy deleted file mode 100644 index 8a19be8d039e4d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query95.groovy +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query95") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as "order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100""" - qt_ds_shape_95 ''' - explain shape plan - with ws_wh as -(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 - from web_sales ws1,web_sales ws2 - where ws1.ws_order_number = ws2.ws_order_number - and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) - select - count(distinct ws_order_number) as 
"order count" - ,sum(ws_ext_ship_cost) as "total shipping cost" - ,sum(ws_net_profit) as "total net profit" -from - web_sales ws1 - ,date_dim - ,customer_address - ,web_site -where - d_date between '2001-4-01' and - (cast('2001-4-01' as date) + interval 60 day) -and ws1.ws_ship_date_sk = d_date_sk -and ws1.ws_ship_addr_sk = ca_address_sk -and ca_state = 'VA' -and ws1.ws_web_site_sk = web_site_sk -and web_company_name = 'pri' -and ws1.ws_order_number in (select ws_order_number - from ws_wh) -and ws1.ws_order_number in (select wr_order_number - from web_returns,ws_wh - where wr_order_number = ws_wh.ws_order_number) -order by count(distinct ws_order_number) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy deleted file mode 100644 index 14645aee61163f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query96.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query96") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 0 - and store.s_store_name = 'ese' -order by count(*) -limit 100""" - qt_ds_shape_96 ''' - explain shape plan - select count(*) -from store_sales - ,household_demographics - ,time_dim, store -where ss_sold_time_sk = time_dim.t_time_sk - and ss_hdemo_sk = household_demographics.hd_demo_sk - and ss_store_sk = s_store_sk - and time_dim.t_hour = 8 - and time_dim.t_minute >= 30 - and household_demographics.hd_dep_count = 0 - and store.s_store_name = 'ese' -order by count(*) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy deleted file mode 100644 index e30c1ec14a7857..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query97.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query97") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - multi_sql """ - use ${db}; - set enable_nereids_planner=true; - set enable_nereids_distribute_planner=true; - set enable_fallback_to_original_planner=false; - set exec_mem_limit=21G; - set be_number_for_test=3; - set enable_runtime_filter_prune=false; - set parallel_pipeline_task_num=8; - set forbid_unknown_col_stats=false; - set enable_stats=true; - set runtime_filter_type=8; - set broadcast_row_count_limit = 30000000; - set enable_nereids_timeout = false; - set enable_pipeline_engine = true; - set disable_nereids_rules='PRUNE_EMPTY_PARTITION'; - set push_topn_to_agg = true; - set topn_opt_limit_threshold=1024; - """ - - def ds = """with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and ss_sold_date_sk IS NOT NULL -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and cs_sold_date_sk IS NOT NULL -group by cs_bill_customer_sk - ,cs_item_sk) - 
select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100""" - qt_ds_shape_97 ''' - explain shape plan - with ssci as ( -select ss_customer_sk customer_sk - ,ss_item_sk item_sk -from store_sales,date_dim -where ss_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and ss_sold_date_sk IS NOT NULL -group by ss_customer_sk - ,ss_item_sk), -csci as( - select cs_bill_customer_sk customer_sk - ,cs_item_sk item_sk -from catalog_sales,date_dim -where cs_sold_date_sk = d_date_sk - and d_month_seq between 1199 and 1199 + 11 and cs_sold_date_sk IS NOT NULL -group by cs_bill_customer_sk - ,cs_item_sk) - select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only - ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only - ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog -from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk - and ssci.item_sk = csci.item_sk) -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy deleted file mode 100644 index dedcf82dae4d04..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query98.groovy +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("query98") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Men', 'Sports', 'Jewelry') - and ss_sold_date_sk = d_date_sk - and d_date between cast('1999-02-05' as date) - and (cast('1999-02-05' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price 
-order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio""" - qt_ds_shape_98 ''' - explain shape plan - select i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price - ,sum(ss_ext_sales_price) as itemrevenue - ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over - (partition by i_class) as revenueratio -from - store_sales - ,item - ,date_dim -where - ss_item_sk = i_item_sk - and i_category in ('Men', 'Sports', 'Jewelry') - and ss_sold_date_sk = d_date_sk - and d_date between cast('1999-02-05' as date) - and (cast('1999-02-05' as date) + interval 30 day) -group by - i_item_id - ,i_item_desc - ,i_category - ,i_class - ,i_current_price -order by - i_category - ,i_class - ,i_item_id - ,i_item_desc - ,revenueratio - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy b/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy deleted file mode 100644 index 6f9e47f2140a90..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpcds_sf1000/shape/query99.groovy +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("query99") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql 'set dump_nereids_memo=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - def ds = """select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100""" - qt_ds_shape_99 ''' - explain shape plan - select - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 
0 end) as "30 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and - (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and - (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and - (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" - ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" -from - catalog_sales - ,warehouse - ,ship_mode - ,call_center - ,date_dim -where - d_month_seq between 1194 and 1194 + 11 -and cs_ship_date_sk = d_date_sk -and cs_warehouse_sk = w_warehouse_sk -and cs_ship_mode_sk = sm_ship_mode_sk -and cs_call_center_sk = cc_call_center_sk -group by - substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -order by substr(w_warehouse_name,1,20) - ,sm_type - ,cc_name -limit 100 - ''' -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy deleted file mode 100644 index 365e953b78953f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/load.groovy +++ /dev/null @@ -1,484 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -suite("load") { - if (isCloudMode()) { - return - } - String database = context.config.getDbNameByFile(context.file) - sql "drop database if exists ${database}" - sql "create database ${database}" - sql "use ${database}" - sql """ - drop table if exists lineitem; - """ - sql """ - CREATE TABLE lineitem ( - l_shipdate DATEV2 NOT NULL, - l_orderkey bigint NOT NULL, - l_linenumber int not null, - l_partkey int NOT NULL, - l_suppkey int not null, - l_quantity decimal(15, 2) NOT NULL, - l_extendedprice decimal(15, 2) NOT NULL, - l_discount decimal(15, 2) NOT NULL, - l_tax decimal(15, 2) NOT NULL, - l_returnflag VARCHAR(1) NOT NULL, - l_linestatus VARCHAR(1) NOT NULL, - l_commitdate DATEV2 NOT NULL, - l_receiptdate DATEV2 NOT NULL, - l_shipinstruct VARCHAR(25) NOT NULL, - l_shipmode VARCHAR(10) NOT NULL, - l_comment VARCHAR(44) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`l_shipdate`, `l_orderkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`l_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); - """ - - sql """ - drop table if exists orders; - """ - - sql ''' - CREATE TABLE orders ( - o_orderkey bigint NOT NULL, - o_orderdate DATEV2 NOT NULL, - o_custkey int NOT NULL, - o_orderstatus VARCHAR(1) NOT NULL, - o_totalprice decimal(15, 2) NOT NULL, - o_orderpriority VARCHAR(15) NOT NULL, - o_clerk VARCHAR(15) NOT NULL, - o_shippriority int NOT NULL, - o_comment VARCHAR(79) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`o_orderkey`, `o_orderdate`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`o_orderkey`) BUCKETS 96 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "lineitem_orders" - ); ''' - - sql ''' - drop table if exists partsupp; - ''' - - sql ''' - CREATE TABLE partsupp ( - ps_partkey int NOT NULL, - ps_suppkey int NOT NULL, - ps_availqty int NOT NULL, - ps_supplycost decimal(15, 2) NOT NULL, - ps_comment 
VARCHAR(199) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`ps_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`ps_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists part; - ''' - - sql ''' - CREATE TABLE part ( - p_partkey int NOT NULL, - p_name VARCHAR(55) NOT NULL, - p_mfgr VARCHAR(25) NOT NULL, - p_brand VARCHAR(10) NOT NULL, - p_type VARCHAR(25) NOT NULL, - p_size int NOT NULL, - p_container VARCHAR(10) NOT NULL, - p_retailprice decimal(15, 2) NOT NULL, - p_comment VARCHAR(23) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`p_partkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`p_partkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1", - "colocate_with" = "part_partsupp" - ); - ''' - - sql ''' - drop table if exists customer; - ''' - - sql ''' - CREATE TABLE customer ( - c_custkey int NOT NULL, - c_name VARCHAR(25) NOT NULL, - c_address VARCHAR(40) NOT NULL, - c_nationkey int NOT NULL, - c_phone VARCHAR(15) NOT NULL, - c_acctbal decimal(15, 2) NOT NULL, - c_mktsegment VARCHAR(10) NOT NULL, - c_comment VARCHAR(117) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`c_custkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`c_custkey`) BUCKETS 24 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists supplier - ''' - - sql ''' - CREATE TABLE supplier ( - s_suppkey int NOT NULL, - s_name VARCHAR(25) NOT NULL, - s_address VARCHAR(40) NOT NULL, - s_nationkey int NOT NULL, - s_phone VARCHAR(15) NOT NULL, - s_acctbal decimal(15, 2) NOT NULL, - s_comment VARCHAR(101) NOT NULL - )ENGINE=OLAP - DUPLICATE KEY(`s_suppkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`s_suppkey`) BUCKETS 12 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists nation; - ''' - - sql ''' - CREATE TABLE `nation` ( - `n_nationkey` int(11) NOT NULL, - `n_name` varchar(25) NOT NULL, - `n_regionkey` int(11) NOT NULL, - `n_comment` varchar(152) NULL - ) ENGINE=OLAP - 
DUPLICATE KEY(`N_NATIONKEY`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`N_NATIONKEY`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop table if exists region; - ''' - - sql ''' - CREATE TABLE region ( - r_regionkey int NOT NULL, - r_name VARCHAR(25) NOT NULL, - r_comment VARCHAR(152) - )ENGINE=OLAP - DUPLICATE KEY(`r_regionkey`) - COMMENT "OLAP" - DISTRIBUTED BY HASH(`r_regionkey`) BUCKETS 1 - PROPERTIES ( - "replication_num" = "1" - ); - ''' - - sql ''' - drop view if exists revenue0; - ''' - - sql ''' - create view revenue0 (supplier_no, total_revenue) as - select - l_suppkey, - sum(l_extendedprice * (1 - l_discount)) - from - lineitem - where - l_shipdate >= date '1996-01-01' - and l_shipdate < date '1996-01-01' + interval '3' month - group by - l_suppkey; - ''' - - -sql ''' -alter table lineitem modify column l_shipdate set stats ('ndv'='2539', 'num_nulls'='0', 'min_value'='1992-01-02', 'max_value'='1998-12-01', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_orderkey set stats ('ndv'='1491920000', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_linenumber set stats ('ndv'='7', 'num_nulls'='0', 'min_value'='1', 'max_value'='7', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_quantity set stats ('ndv'='50', 'num_nulls'='0', 'min_value'='1.00', 'max_value'='50.00', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_extendedprice set stats ('ndv'='3793003', 'num_nulls'='0', 'min_value'='900.00', 
'max_value'='104950.00', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_discount set stats ('ndv'='11', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.10', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_tax set stats ('ndv'='9', 'num_nulls'='0', 'min_value'='0.00', 'max_value'='0.08', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_returnflag set stats ('ndv'='3', 'num_nulls'='0', 'min_value'='A', 'max_value'='R', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_linestatus set stats ('ndv'='2', 'num_nulls'='0', 'min_value'='F', 'max_value'='O', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_commitdate set stats ('ndv'='2473', 'num_nulls'='0', 'min_value'='1992-01-31', 'max_value'='1998-10-31', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_receiptdate set stats ('ndv'='2568', 'num_nulls'='0', 'min_value'='1992-01-03', 'max_value'='1998-12-31', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_shipinstruct set stats ('ndv'='4', 'num_nulls'='0', 'min_value'='COLLECT COD', 'max_value'='TAKE BACK RETURN', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_shipmode set stats ('ndv'='7', 'num_nulls'='0', 'min_value'='AIR', 'max_value'='TRUCK', 'row_count'='5999989709'); -''' - -sql ''' -alter table lineitem modify column l_comment set stats ('ndv'='155259104', 'num_nulls'='0', 'min_value'=' Tiresias ', 'max_value'='zzle? 
unusual', 'row_count'='5999989709'); -''' - - -sql ''' -alter table orders modify column o_orderkey set stats ('ndv'='1491920000', 'num_nulls'='0', 'min_value'='1', 'max_value'='6000000000', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderdate set stats ('ndv'='2417', 'num_nulls'='0', 'min_value'='1992-01-01', 'max_value'='1998-08-02', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_custkey set stats ('ndv'='101410744', 'num_nulls'='0', 'min_value'='1', 'max_value'='149999999', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderstatus set stats ('ndv'='3', 'num_nulls'='0', 'min_value'='F', 'max_value'='P', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_totalprice set stats ('ndv'='41700404', 'num_nulls'='0', 'min_value'='810.87', 'max_value'='602901.81', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_orderpriority set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='1-URGENT', 'max_value'='5-LOW', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_clerk set stats ('ndv'='1013689', 'num_nulls'='0', 'min_value'='Clerk#000000001', 'max_value'='Clerk#001000000', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_shippriority set stats ('ndv'='1', 'num_nulls'='0', 'min_value'='0', 'max_value'='0', 'row_count'='1500000000'); -''' - -sql ''' -alter table orders modify column o_comment set stats ('ndv'='272632352', 'num_nulls'='0', 'min_value'=' Tiresias about the', 'max_value'='zzle? 
unusual requests w', 'row_count'='1500000000'); -''' - - -sql ''' -alter table partsupp modify column ps_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_availqty set stats ('ndv'='10008', 'num_nulls'='0', 'min_value'='1', 'max_value'='9999', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_supplycost set stats ('ndv'='100279', 'num_nulls'='0', 'min_value'='1.00', 'max_value'='1000.00', 'row_count'='800000000'); -''' - -sql ''' -alter table partsupp modify column ps_comment set stats ('ndv'='303150816', 'num_nulls'='0', 'min_value'=' Tiresias about the accounts detect quickly final foxes. instructions about the blithely unusual theodolites use blithely f', 'max_value'='zzle? unusual requests wake slyly. 
slyly regular requests are e', 'row_count'='800000000'); -''' - - - -sql ''' -alter table part modify column p_partkey set stats ('ndv'='200778064', 'num_nulls'='0', 'min_value'='1', 'max_value'='200000000', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_name set stats ('ndv'='196191408', 'num_nulls'='0', 'min_value'='almond antique aquamarine azure blush', 'max_value'='yellow white wheat violet red', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_mfgr set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='Manufacturer#1', 'max_value'='Manufacturer#5', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_brand set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='Brand#11', 'max_value'='Brand#55', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_type set stats ('ndv'='150', 'num_nulls'='0', 'min_value'='ECONOMY ANODIZED BRASS', 'max_value'='STANDARD POLISHED TIN', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_size set stats ('ndv'='50', 'num_nulls'='0', 'min_value'='1', 'max_value'='50', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_container set stats ('ndv'='40', 'num_nulls'='0', 'min_value'='JUMBO BAG', 'max_value'='WRAP PKG', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_retailprice set stats ('ndv'='120904', 'num_nulls'='0', 'min_value'='900.00', 'max_value'='2099.00', 'row_count'='200000000'); -''' - -sql ''' -alter table part modify column p_comment set stats ('ndv'='14213541', 'num_nulls'='0', 'min_value'=' Tire', 'max_value'='zzle? 
speci', 'row_count'='200000000'); -''' - - - -sql ''' -alter table supplier modify column s_suppkey set stats ('ndv'='10031328', 'num_nulls'='0', 'min_value'='1', 'max_value'='10000000', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_name set stats ('ndv'='9992858', 'num_nulls'='0', 'min_value'='Supplier#000000001', 'max_value'='Supplier#010000000', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_address set stats ('ndv'='10000390', 'num_nulls'='0', 'min_value'=' 04SJW3NWgeWBx2YualVtK62DXnr', 'max_value'='zzzzr MaemffsKy', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_phone set stats ('ndv'='9975965', 'num_nulls'='0', 'min_value'='10-100-101-9215', 'max_value'='34-999-999-3239', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_acctbal set stats ('ndv'='1109296', 'num_nulls'='0', 'min_value'='-999.99', 'max_value'='9999.99', 'row_count'='10000000'); -''' - -sql ''' -alter table supplier modify column s_comment set stats ('ndv'='9854117', 'num_nulls'='0', 'min_value'=' Customer accounts are blithely furiousRecommends', 'max_value'='zzle? 
special packages haggle carefully regular inst', 'row_count'='10000000'); -''' - - - -sql ''' -alter table customer modify column c_custkey set stats ('ndv'='151682592', 'num_nulls'='0', 'min_value'='1', 'max_value'='150000000', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_name set stats ('ndv'='149989056', 'num_nulls'='0', 'min_value'='Customer#000000001', 'max_value'='Customer#150000000', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_address set stats ('ndv'='149316720', 'num_nulls'='0', 'min_value'=' 2WGW,hiM7jHg2', 'max_value'='zzzzyW,aeC8HnFV', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_phone set stats ('ndv'='150226160', 'num_nulls'='0', 'min_value'='10-100-100-3024', 'max_value'='34-999-999-9215', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_acctbal set stats ('ndv'='1109296', 'num_nulls'='0', 'min_value'='-999.99', 'max_value'='9999.99', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_mktsegment set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='AUTOMOBILE', 'max_value'='MACHINERY', 'row_count'='150000000'); -''' - -sql ''' -alter table customer modify column c_comment set stats ('ndv'='120255488', 'num_nulls'='0', 'min_value'=' Tiresias about the accounts haggle quiet, busy foxe', 'max_value'='zzle? 
special accounts about the iro', 'row_count'='150000000'); -''' - - - -sql ''' -alter table region modify column r_regionkey set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='0', 'max_value'='4', 'row_count'='5'); -''' - -sql ''' -alter table region modify column r_name set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='AFRICA', 'max_value'='MIDDLE EAST', 'row_count'='5'); -''' - -sql ''' -alter table region modify column r_comment set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='ges. thinly even pinto beans ca', 'max_value'='uickly special accounts cajole carefully blithely close requests. carefully final asymptotes haggle furiousl', 'row_count'='5'); -''' - - - -sql ''' -alter table nation modify column n_nationkey set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='0', 'max_value'='24', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_name set stats ('ndv'='25', 'num_nulls'='0', 'min_value'='ALGERIA', 'max_value'='VIETNAM', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_regionkey set stats ('ndv'='5', 'num_nulls'='0', 'min_value'='0', 'max_value'='4', 'row_count'='25'); -''' - -sql ''' -alter table nation modify column n_comment set stats ('ndv'='25', 'num_nulls'='0', 'min_value'=' haggle. carefully final deposits detect slyly agai', 'max_value'='y final packages. slow foxes cajole quickly. quickly silent platelets breach ironic accounts. unusual pinto be', 'row_count'='25'); -''' - -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy deleted file mode 100644 index ae7072c55ed913..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q1.groovy +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy deleted file mode 100644 index 2ea2891b4a2c6e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q10.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy deleted file mode 100644 index 997aa07898b61e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q11.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy deleted file 
mode 100644 index b78ad09e0e6bee..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q12.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, 
- lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy deleted file mode 100644 index 52ce1a04b62a5d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q13.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy deleted file mode 100644 index 28d7599970a0e0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q14.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy deleted file mode 100644 index 00897a7819000d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q15.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy deleted file mode 100644 index 8d682c8edb9004..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q16.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where 
- s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy deleted file mode 100644 index 52937ee96b7b84..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q17.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy deleted file mode 100644 index 2e4245d06ee338..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q18.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy deleted file mode 100644 index c2fffd575f7e3a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q19.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED 
BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy deleted file mode 100644 index cabce2fc86697b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q2.groovy +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy deleted file mode 100644 index 32efcdde07b9a0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) 
t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy deleted file mode 100644 index ea9819fd3cfed5..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q20.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy deleted file mode 100644 index 28b5d0f7648fef..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q21.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait desc, - s_name - limit 100; - """ 
-} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy deleted file mode 100644 index d363362a329540..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q22.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy deleted file mode 100644 index cc0f648b264a5f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q3.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy deleted file mode 100644 index 
3454d5bb6fef9d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q4.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - 
o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy deleted file mode 100644 index f49ff1f561aef6..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q5.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy deleted file mode 100644 index 43da4d59e6ba78..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q6.groovy +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy deleted file mode 100644 index bac6387a3cb048..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q7.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as 
shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy deleted file mode 100644 index 44199ff620ebdf..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q8.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy deleted file mode 100644 index 263f4f6f16d914..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/nostats_rf_prune/q9.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - 
nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy deleted file mode 100644 index f90bc9891cb428..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q1.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy deleted file mode 100644 index 7079db86ee7c77..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q10.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy deleted file mode 100644 index 6c3ad7913a6958..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q11.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 
0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy deleted file mode 100644 index 75846854982e85..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy deleted file mode 100644 index bb49201e823132..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q13.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy deleted file mode 100644 index 04f031f141ff23..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q14.groovy +++ 
/dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy deleted file mode 100644 index 4c4288133d3e70..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q15.groovy +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy deleted file mode 100644 index 4927b90c071a7f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q16.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy deleted file mode 100644 index 748d16fc9d537d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q17.groovy +++ /dev/null @@ 
-1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy 
b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy deleted file mode 100644 index 9ad92d40f2e8ed..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q18.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy deleted file mode 100644 index 86b4ea66b18a6c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q19.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - 
and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy deleted file mode 100644 index bde54070a72c3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q2.groovy +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy deleted file mode 100644 index 0f618516b8be62..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20-rewrite.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey 
- and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy deleted file mode 100644 index 50315e7a95aafd..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q20.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy deleted file mode 100644 index ee7e8f9be4cd98..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q21.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait 
desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy deleted file mode 100644 index c14fdbe4a4c172..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q22.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy deleted file mode 100644 index 836a30172eb0ec..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q3.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy deleted file mode 100644 index 
de22ca1cafda73..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q4.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < 
l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy deleted file mode 100644 index ba5abe1ed4719e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q5.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy deleted file mode 100644 index 5a03ad454af74d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q6.groovy +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy deleted file mode 100644 index 08c7532ba994bc..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q7.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date 
'1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy deleted file mode 100644 index 56455092954780..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q8.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy deleted file mode 100644 index 8b5b664928891c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/rf_prune/q9.groovy +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set forbid_unknown_col_stats=true' -sql 'set enable_runtime_filter_prune=true' -sql 'set enable_stats=true' -sql 'set enable_left_zig_zag=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - 
nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy deleted file mode 100644 index 3d9b2012693ce3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("test_pushdown_setop") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'set be_number_for_test=3' - sql 'set parallel_fragment_exec_instance_num=8; ' - sql 'set parallel_pipeline_task_num=8; ' - sql 'set forbid_unknown_col_stats=true' - sql 'set enable_nereids_timeout = false' - sql 'set enable_runtime_filter_prune=false' - sql 'set runtime_filter_type=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_rf_setop """ - explain shape plan - select count() from ((select l_linenumber from lineitem) except (select o_orderkey from orders)) T join region on T.l_linenumber = r_regionkey; - """ - - qt_rf_setop_expr """ - explain shape plan select count() from ((select l_linenumber from lineitem) except (select o_orderkey from orders)) T join region on abs(T.l_linenumber) = r_regionkey; - """ -} - diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy deleted file mode 100644 index 76bffda65f8c4a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q1.groovy +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" - sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy deleted file mode 100644 index 89ca6dc051fd3f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q10.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license 
agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy deleted file mode 100644 index b21ba41bf4187b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q11.groovy +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy deleted file mode 100644 index ced1db1a561937..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q12.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy deleted file mode 100644 index b7eaca41ac5d36..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q13.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - 
* Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy deleted file mode 100644 index 
5617134c86680c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q14.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy deleted file mode 100644 index 02ad529f9b314d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q15.groovy +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy deleted file mode 100644 index 333b6d837cac19..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q16.groovy +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy deleted file mode 100644 index 75ee0508980951..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q17.groovy +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy deleted file mode 100644 index 425d7da6119e43..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q18.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy deleted file mode 100644 index f5a3aa43a8dd29..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q19.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - 
and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy deleted file mode 100644 index ae39c1116b805d..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q2.groovy +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy deleted file mode 100644 index 97e6a2272c60e4..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20-rewrite.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy deleted file mode 100644 index f24004cec88626..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q20.groovy +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy deleted file mode 100644 index 6bc96c09407a3e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q21.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order by - numwait desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy deleted file mode 100644 index e4c5d7ceaa2411..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q22.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', 
'17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy deleted file mode 100644 index 3f4693ae74591c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q3.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy deleted file mode 100644 index 6a68d84009ddf7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q4.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where - l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy deleted file mode 100644 index b334dbbbf3bec7..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q5.groovy +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy deleted file mode 100644 index 5f0e58c5d0d421..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q6.groovy +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy deleted file 
mode 100644 index 36eacb5f6fd120..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q7.groovy +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and 
o_orderkey = l_orderkey - and c_custkey = o_custkey - and s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy deleted file mode 100644 index de9e89b2261f42..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q8.groovy +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy deleted file mode 100644 index 7ee7e2ec186f7b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape/q9.groovy +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit - group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy 
b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy deleted file mode 100644 index dfb5a6a0d2e83b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q1.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q1") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - qt_select """ - explain shape plan - select - l_returnflag, - l_linestatus, - sum(l_quantity) as sum_qty, - sum(l_extendedprice) as sum_base_price, - sum(l_extendedprice * (1 - l_discount)) as sum_disc_price, - sum(l_extendedprice * (1 - l_discount) * (1 + l_tax)) as sum_charge, - avg(l_quantity) as avg_qty, - avg(l_extendedprice) as avg_price, - avg(l_discount) as avg_disc, - count(*) as count_order - from - lineitem - where - l_shipdate <= date '1998-12-01' - interval '90' day - group by - l_returnflag, - l_linestatus - order by - l_returnflag, - l_linestatus; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy deleted file mode 100644 index 30cc6abdbdef4c..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q10.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q10") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_custkey, - c_name, - sum(l_extendedprice * (1 - l_discount)) as revenue, - c_acctbal, - n_name, - c_address, - c_phone, - c_comment - from - customer, - orders, - lineitem, - nation - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate >= date '1993-10-01' - and o_orderdate < date '1993-10-01' + interval '3' month - and l_returnflag = 'R' - and c_nationkey = n_nationkey - group by - c_custkey, - c_name, - c_acctbal, - c_phone, - n_name, - c_address, - c_comment - order by - revenue desc - limit 20; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy deleted file mode 100644 index fd49b25488a426..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q11.groovy +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q11") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - qt_select """ - explain shape plan - select - ps_partkey, - sum(ps_supplycost * ps_availqty) as value - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - group by - ps_partkey having - sum(ps_supplycost * ps_availqty) > ( - select - sum(ps_supplycost * ps_availqty) * 0.000002 - from - partsupp, - supplier, - nation - where - ps_suppkey = s_suppkey - and s_nationkey = n_nationkey - and n_name = 'GERMANY' - ) - order by - value desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy deleted file mode 100644 index 3b2b4fe1f536a3..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q12.groovy +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q12") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - l_shipmode, - sum(case - when o_orderpriority = '1-URGENT' - or o_orderpriority = '2-HIGH' - then 1 - else 0 - end) as high_line_count, - sum(case - when o_orderpriority <> '1-URGENT' - and o_orderpriority <> '2-HIGH' - then 1 - else 0 - end) as low_line_count - from - orders, - lineitem - where - o_orderkey = l_orderkey - and l_shipmode in ('MAIL', 'SHIP') - and l_commitdate < l_receiptdate - and l_shipdate < l_commitdate - and l_receiptdate >= date '1994-01-01' - and l_receiptdate < date '1994-01-01' + interval '1' year - group by - l_shipmode - order by - l_shipmode; - """ -} diff --git 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy deleted file mode 100644 index 72252a9779675b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q13.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q13") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - c_count, - count(*) as custdist - from - ( - select - c_custkey, - count(o_orderkey) as c_count - from - customer left outer join orders on - c_custkey = o_custkey - and o_comment not like '%special%requests%' - group by - c_custkey - ) as c_orders - group by - c_count - order by - custdist desc, - c_count desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy deleted file mode 100644 index f9a0eeefd33fa0..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q14.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q14") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - qt_select """ - explain shape plan - select - 100.00 * sum(case - when p_type like 'PROMO%' - then l_extendedprice * (1 - l_discount) - else 0 - end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue - from - lineitem, - part - where - l_partkey = p_partkey - and l_shipdate >= date '1995-09-01' - and l_shipdate < date '1995-09-01' + interval '1' month; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy deleted file mode 100644 index b79e8d3855e80e..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q15.groovy +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q15") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_suppkey, - s_name, - s_address, - s_phone, - total_revenue - from - supplier, - revenue0 - where - s_suppkey = supplier_no - and total_revenue = ( - select - max(total_revenue) - from - revenue0 - ) - order by - s_suppkey; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy deleted file mode 100644 index 2ebca626bbdf19..00000000000000 --- 
a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q16.groovy +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q16") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - p_brand, - p_type, - p_size, - count(distinct ps_suppkey) as supplier_cnt - from - partsupp, - part - where - p_partkey = ps_partkey - and p_brand <> 'Brand#45' - and p_type not like 'MEDIUM POLISHED%' - and p_size in (49, 14, 23, 45, 19, 3, 36, 9) - and ps_suppkey not in ( - 
select - s_suppkey - from - supplier - where - s_comment like '%Customer%Complaints%' - ) - group by - p_brand, - p_type, - p_size - order by - supplier_cnt desc, - p_brand, - p_type, - p_size; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy deleted file mode 100644 index 06aa3299d5022f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q17.groovy +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q17") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice) / 7.0 as avg_yearly - from - lineitem, - part - where - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container = 'MED BOX' - and l_quantity < ( - select - 0.2 * avg(l_quantity) - from - lineitem - where - l_partkey = p_partkey - ); - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy deleted file mode 100644 index 4f5a07cfbc1371..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q18.groovy +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q18") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice, - sum(l_quantity) - from - customer, - orders, - lineitem - where - o_orderkey in ( - select - l_orderkey - from - lineitem - group by - l_orderkey having - sum(l_quantity) > 300 - ) - and c_custkey = o_custkey - and o_orderkey = l_orderkey - group by - c_name, - c_custkey, - o_orderkey, - o_orderdate, - o_totalprice - order by - o_totalprice desc, - o_orderdate - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy deleted file mode 100644 index c800d256d5c66f..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q19.groovy +++ /dev/null 
@@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q19") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice* (1 - l_discount)) as revenue - from - lineitem, - part - where - ( - p_partkey = l_partkey - and p_brand = 'Brand#12' - and p_container in ('SM CASE', 'SM BOX', 'SM PACK', 'SM PKG') - and l_quantity >= 1 and l_quantity <= 1 + 10 - and p_size between 1 and 5 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - 
or - ( - p_partkey = l_partkey - and p_brand = 'Brand#23' - and p_container in ('MED BAG', 'MED BOX', 'MED PKG', 'MED PACK') - and l_quantity >= 10 and l_quantity <= 10 + 10 - and p_size between 1 and 10 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ) - or - ( - p_partkey = l_partkey - and p_brand = 'Brand#34' - and p_container in ('LG CASE', 'LG BOX', 'LG PACK', 'LG PKG') - and l_quantity >= 20 and l_quantity <= 20 + 10 - and p_size between 1 and 15 - and l_shipmode in ('AIR', 'AIR REG') - and l_shipinstruct = 'DELIVER IN PERSON' - ); - - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy deleted file mode 100644 index ee920ca6e50d78..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q2.groovy +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q2") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_acctbal, - s_name, - n_name, - p_partkey, - p_mfgr, - s_address, - s_phone, - s_comment - from - part, - supplier, - partsupp, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and p_size = 15 - and p_type like '%BRASS' - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - and ps_supplycost = ( - select - min(ps_supplycost) - from - partsupp, - supplier, - nation, - region - where - p_partkey = ps_partkey - and s_suppkey = ps_suppkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'EUROPE' - ) - order by - s_acctbal desc, - n_name, - s_name, - p_partkey - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy deleted file mode 100644 index c30a636d3ff73a..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20-rewrite.groovy +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q20-rewrite") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set parallel_pipeline_task_num=8' - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan -select -s_name, s_address -from -supplier left semi join -( - select * from - ( - select l_partkey,l_suppkey, 0.5 * sum(l_quantity) as l_q - from lineitem - where l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - group by l_partkey,l_suppkey - ) t2 join - ( - select ps_partkey, ps_suppkey, ps_availqty - from partsupp left semi join part - on ps_partkey = p_partkey and p_name like 'forest%' - ) t1 - on t2.l_partkey = t1.ps_partkey and t2.l_suppkey = 
t1.ps_suppkey - and t1.ps_availqty > t2.l_q -) t3 -on s_suppkey = t3.ps_suppkey -join nation -where s_nationkey = n_nationkey - and n_name = 'CANADA' -order by s_name -; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy deleted file mode 100644 index 4007273901e112..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q20.groovy +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q20") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - s_address - from - supplier, - nation - where - s_suppkey in ( - select - ps_suppkey - from - partsupp - where - ps_partkey in ( - select - p_partkey - from - part - where - p_name like 'forest%' - ) - and ps_availqty > ( - select - 0.5 * sum(l_quantity) - from - lineitem - where - l_partkey = ps_partkey - and l_suppkey = ps_suppkey - and l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - ) - ) - and s_nationkey = n_nationkey - and n_name = 'CANADA' - order by - s_name; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy deleted file mode 100644 index 89216bd88a2b39..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q21.groovy +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q21") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - s_name, - count(*) as numwait - from - supplier, - lineitem l1, - orders, - nation - where - s_suppkey = l1.l_suppkey - and o_orderkey = l1.l_orderkey - and o_orderstatus = 'F' - and l1.l_receiptdate > l1.l_commitdate - and exists ( - select - * - from - lineitem l2 - where - l2.l_orderkey = l1.l_orderkey - and l2.l_suppkey <> l1.l_suppkey - ) - and not exists ( - select - * - from - lineitem l3 - where - l3.l_orderkey = l1.l_orderkey - and l3.l_suppkey <> l1.l_suppkey - and l3.l_receiptdate > l3.l_commitdate - ) - and s_nationkey = n_nationkey - and n_name = 'SAUDI ARABIA' - group by - s_name - order 
by - numwait desc, - s_name - limit 100; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy deleted file mode 100644 index e2f7e1096b1786..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q22.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q22") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - cntrycode, - count(*) as numcust, - sum(c_acctbal) as totacctbal - from - ( - select - substring(c_phone, 1, 2) as cntrycode, - c_acctbal - from - customer - where - substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - and c_acctbal > ( - select - avg(c_acctbal) - from - customer - where - c_acctbal > 0.00 - and substring(c_phone, 1, 2) in - ('13', '31', '23', '29', '30', '18', '17') - ) - and not exists ( - select - * - from - orders - where - o_custkey = c_custkey - ) - ) as custsale - group by - cntrycode - order by - cntrycode; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy deleted file mode 100644 index 01c926f1efc768..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q3.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q3") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - // db = "tpch" - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - l_orderkey, - sum(l_extendedprice * (1 - l_discount)) as revenue, - o_orderdate, - o_shippriority - from - customer, - orders, - lineitem - where - c_mktsegment = 'BUILDING' - and c_custkey = o_custkey - and l_orderkey = o_orderkey - and o_orderdate < date '1995-03-15' - and l_shipdate > date '1995-03-15' - group by - l_orderkey, - o_orderdate, - o_shippriority - order by - revenue desc, - o_orderdate - limit 10; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy 
deleted file mode 100644 index 55ff31bb4577df..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q4.groovy +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q4") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql 'set parallel_pipeline_task_num=8' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - - - - qt_select """ - explain shape plan - select - o_orderpriority, - count(*) as order_count - from - orders - where - o_orderdate >= date '1993-07-01' - and o_orderdate < date '1993-07-01' + interval '3' month - and exists ( - select - * - from - lineitem - where 
- l_orderkey = o_orderkey - and l_commitdate < l_receiptdate - ) - group by - o_orderpriority - order by - o_orderpriority; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy deleted file mode 100644 index bdc6d994c27245..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q5.groovy +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q5") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - n_name, - sum(l_extendedprice * (1 - l_discount)) as revenue - from - customer, - orders, - lineitem, - supplier, - nation, - region - where - c_custkey = o_custkey - and l_orderkey = o_orderkey - and l_suppkey = s_suppkey - and c_nationkey = s_nationkey - and s_nationkey = n_nationkey - and n_regionkey = r_regionkey - and r_name = 'ASIA' - and o_orderdate >= date '1994-01-01' - and o_orderdate < date '1994-01-01' + interval '1' year - group by - n_name - order by - revenue desc; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy deleted file mode 100644 index f1474a164444af..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q6.groovy +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q6") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - sum(l_extendedprice * l_discount) as revenue - from - lineitem - where - l_shipdate >= date '1994-01-01' - and l_shipdate < date '1994-01-01' + interval '1' year - and l_discount between .06 - 0.01 and .06 + 0.01 - and l_quantity < 24; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy deleted file mode 100644 index 2adc348b633426..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q7.groovy +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q7") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - supp_nation, - cust_nation, - l_year, - sum(volume) as revenue - from - ( - select - n1.n_name as supp_nation, - n2.n_name as cust_nation, - extract(year from l_shipdate) as l_year, - l_extendedprice * (1 - l_discount) as volume - from - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2 - where - s_suppkey = l_suppkey - and o_orderkey = l_orderkey - and c_custkey = o_custkey - and 
s_nationkey = n1.n_nationkey - and c_nationkey = n2.n_nationkey - and ( - (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') - or (n1.n_name = 'GERMANY' and n2.n_name = 'FRANCE') - ) - and l_shipdate between date '1995-01-01' and date '1996-12-31' - ) as shipping - group by - supp_nation, - cust_nation, - l_year - order by - supp_nation, - cust_nation, - l_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy deleted file mode 100644 index f99324ba64cb5b..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q8.groovy +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -suite("q8") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - o_year, - sum(case - when nation = 'BRAZIL' then volume - else 0 - end) / sum(volume) as mkt_share - from - ( - select - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) as volume, - n2.n_name as nation - from - part, - supplier, - lineitem, - orders, - customer, - nation n1, - nation n2, - region - where - p_partkey = l_partkey - and s_suppkey = l_suppkey - and l_orderkey = o_orderkey - and o_custkey = c_custkey - and c_nationkey = n1.n_nationkey - and n1.n_regionkey = r_regionkey - and r_name = 'AMERICA' - and s_nationkey = n2.n_nationkey - and o_orderdate between date '1995-01-01' and date '1996-12-31' - and p_type = 'ECONOMY ANODIZED STEEL' - ) as all_nations - group by - o_year - order by - o_year; - """ -} diff --git a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy b/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy deleted file mode 100644 index 692afad084f535..00000000000000 --- a/regression-test/suites/new_shapes_p0/tpch_sf1000/shape_no_stats/q9.groovy +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -suite("q9") { - if (isCloudMode()) { - return - } - String db = context.config.getDbNameByFile(new File(context.file.parent)) - sql "use ${db}" - sql 'set enable_nereids_planner=true' - sql 'set enable_nereids_distribute_planner=true' - sql 'set enable_fallback_to_original_planner=false' - sql "set runtime_filter_mode='GLOBAL'" - - sql 'set exec_mem_limit=21G' - sql 'SET enable_pipeline_engine = true' - sql 'set parallel_pipeline_task_num=8' - - - - sql 'set be_number_for_test=3' - sql "set runtime_filter_type=8" -sql 'set enable_runtime_filter_prune=false' -sql 'set forbid_unknown_col_stats=false;' -sql 'set enable_runtime_filter_prune=false' -sql 'set enable_stats=false' - sql "set disable_nereids_rules=PRUNE_EMPTY_PARTITION" - - - qt_select """ - explain shape plan - select - nation, - o_year, - sum(amount) as sum_profit - from - ( - select - n_name as nation, - extract(year from o_orderdate) as o_year, - l_extendedprice * (1 - l_discount) - ps_supplycost * l_quantity as amount - from - part, - supplier, - lineitem, - partsupp, - orders, - nation - where - s_suppkey = l_suppkey - and ps_suppkey = l_suppkey - and ps_partkey = l_partkey - and p_partkey = l_partkey - and o_orderkey = l_orderkey - and s_nationkey = n_nationkey - and p_name like '%green%' - ) as profit 
- group by - nation, - o_year - order by - nation, - o_year desc; - """ -} diff --git a/regression-test/suites/nereids_clickbench_shape_p0/load.groovy b/regression-test/suites/shape_check/clickbench/load.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/load.groovy rename to regression-test/suites/shape_check/clickbench/load.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query1.groovy b/regression-test/suites/shape_check/clickbench/query1.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query1.groovy rename to regression-test/suites/shape_check/clickbench/query1.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query10.groovy b/regression-test/suites/shape_check/clickbench/query10.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query10.groovy rename to regression-test/suites/shape_check/clickbench/query10.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query11.groovy b/regression-test/suites/shape_check/clickbench/query11.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query11.groovy rename to regression-test/suites/shape_check/clickbench/query11.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query12.groovy b/regression-test/suites/shape_check/clickbench/query12.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query12.groovy rename to regression-test/suites/shape_check/clickbench/query12.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query13.groovy b/regression-test/suites/shape_check/clickbench/query13.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query13.groovy rename to regression-test/suites/shape_check/clickbench/query13.groovy diff --git 
a/regression-test/suites/nereids_clickbench_shape_p0/query14.groovy b/regression-test/suites/shape_check/clickbench/query14.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query14.groovy rename to regression-test/suites/shape_check/clickbench/query14.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query15.groovy b/regression-test/suites/shape_check/clickbench/query15.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query15.groovy rename to regression-test/suites/shape_check/clickbench/query15.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query16.groovy b/regression-test/suites/shape_check/clickbench/query16.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query16.groovy rename to regression-test/suites/shape_check/clickbench/query16.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query17.groovy b/regression-test/suites/shape_check/clickbench/query17.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query17.groovy rename to regression-test/suites/shape_check/clickbench/query17.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query18.groovy b/regression-test/suites/shape_check/clickbench/query18.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query18.groovy rename to regression-test/suites/shape_check/clickbench/query18.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query19.groovy b/regression-test/suites/shape_check/clickbench/query19.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query19.groovy rename to regression-test/suites/shape_check/clickbench/query19.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query2.groovy 
b/regression-test/suites/shape_check/clickbench/query2.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query2.groovy rename to regression-test/suites/shape_check/clickbench/query2.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query20.groovy b/regression-test/suites/shape_check/clickbench/query20.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query20.groovy rename to regression-test/suites/shape_check/clickbench/query20.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query21.groovy b/regression-test/suites/shape_check/clickbench/query21.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query21.groovy rename to regression-test/suites/shape_check/clickbench/query21.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query22.groovy b/regression-test/suites/shape_check/clickbench/query22.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query22.groovy rename to regression-test/suites/shape_check/clickbench/query22.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query23.groovy b/regression-test/suites/shape_check/clickbench/query23.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query23.groovy rename to regression-test/suites/shape_check/clickbench/query23.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query24.groovy b/regression-test/suites/shape_check/clickbench/query24.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query24.groovy rename to regression-test/suites/shape_check/clickbench/query24.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query25.groovy b/regression-test/suites/shape_check/clickbench/query25.groovy similarity index 100% rename from 
regression-test/suites/nereids_clickbench_shape_p0/query25.groovy rename to regression-test/suites/shape_check/clickbench/query25.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query26.groovy b/regression-test/suites/shape_check/clickbench/query26.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query26.groovy rename to regression-test/suites/shape_check/clickbench/query26.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query27.groovy b/regression-test/suites/shape_check/clickbench/query27.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query27.groovy rename to regression-test/suites/shape_check/clickbench/query27.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query28.groovy b/regression-test/suites/shape_check/clickbench/query28.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query28.groovy rename to regression-test/suites/shape_check/clickbench/query28.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query29.groovy b/regression-test/suites/shape_check/clickbench/query29.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query29.groovy rename to regression-test/suites/shape_check/clickbench/query29.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query3.groovy b/regression-test/suites/shape_check/clickbench/query3.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query3.groovy rename to regression-test/suites/shape_check/clickbench/query3.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query30.groovy b/regression-test/suites/shape_check/clickbench/query30.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query30.groovy rename to 
regression-test/suites/shape_check/clickbench/query30.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query31.groovy b/regression-test/suites/shape_check/clickbench/query31.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query31.groovy rename to regression-test/suites/shape_check/clickbench/query31.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query32.groovy b/regression-test/suites/shape_check/clickbench/query32.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query32.groovy rename to regression-test/suites/shape_check/clickbench/query32.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query33.groovy b/regression-test/suites/shape_check/clickbench/query33.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query33.groovy rename to regression-test/suites/shape_check/clickbench/query33.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query34.groovy b/regression-test/suites/shape_check/clickbench/query34.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query34.groovy rename to regression-test/suites/shape_check/clickbench/query34.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query35.groovy b/regression-test/suites/shape_check/clickbench/query35.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query35.groovy rename to regression-test/suites/shape_check/clickbench/query35.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query36.groovy b/regression-test/suites/shape_check/clickbench/query36.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query36.groovy rename to regression-test/suites/shape_check/clickbench/query36.groovy diff --git 
a/regression-test/suites/nereids_clickbench_shape_p0/query37.groovy b/regression-test/suites/shape_check/clickbench/query37.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query37.groovy rename to regression-test/suites/shape_check/clickbench/query37.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query38.groovy b/regression-test/suites/shape_check/clickbench/query38.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query38.groovy rename to regression-test/suites/shape_check/clickbench/query38.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query39.groovy b/regression-test/suites/shape_check/clickbench/query39.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query39.groovy rename to regression-test/suites/shape_check/clickbench/query39.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query4.groovy b/regression-test/suites/shape_check/clickbench/query4.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query4.groovy rename to regression-test/suites/shape_check/clickbench/query4.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query40.groovy b/regression-test/suites/shape_check/clickbench/query40.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query40.groovy rename to regression-test/suites/shape_check/clickbench/query40.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query41.groovy b/regression-test/suites/shape_check/clickbench/query41.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query41.groovy rename to regression-test/suites/shape_check/clickbench/query41.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query42.groovy 
b/regression-test/suites/shape_check/clickbench/query42.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query42.groovy rename to regression-test/suites/shape_check/clickbench/query42.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query43.groovy b/regression-test/suites/shape_check/clickbench/query43.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query43.groovy rename to regression-test/suites/shape_check/clickbench/query43.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query5.groovy b/regression-test/suites/shape_check/clickbench/query5.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query5.groovy rename to regression-test/suites/shape_check/clickbench/query5.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query6.groovy b/regression-test/suites/shape_check/clickbench/query6.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query6.groovy rename to regression-test/suites/shape_check/clickbench/query6.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query7.groovy b/regression-test/suites/shape_check/clickbench/query7.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query7.groovy rename to regression-test/suites/shape_check/clickbench/query7.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query8.groovy b/regression-test/suites/shape_check/clickbench/query8.groovy similarity index 100% rename from regression-test/suites/nereids_clickbench_shape_p0/query8.groovy rename to regression-test/suites/shape_check/clickbench/query8.groovy diff --git a/regression-test/suites/nereids_clickbench_shape_p0/query9.groovy b/regression-test/suites/shape_check/clickbench/query9.groovy similarity index 100% rename from 
regression-test/suites/nereids_clickbench_shape_p0/query9.groovy rename to regression-test/suites/shape_check/clickbench/query9.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/load.groovy b/regression-test/suites/shape_check/ssb_sf100/load.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/load.groovy rename to regression-test/suites/shape_check/ssb_sf100/load.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/flat.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/flat.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/flat.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/flat.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q1.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q1.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q1.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.1.groovy rename to 
regression-test/suites/shape_check/ssb_sf100/shape/q2.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q2.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q2.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q2.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q2.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.3.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.4.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q3.4.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q3.4.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q3.4.groovy diff --git 
a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.1.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.1.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.1.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.1.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.2.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.2.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.2.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.2.groovy diff --git a/regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.3.groovy b/regression-test/suites/shape_check/ssb_sf100/shape/q4.3.groovy similarity index 100% rename from regression-test/suites/nereids_ssb_shape_sf100_p0/shape/q4.3.groovy rename to regression-test/suites/shape_check/ssb_sf100/shape/q4.3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/load.groovy b/regression-test/suites/shape_check/tpcds_sf100/constraints/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/load.groovy rename to regression-test/suites/shape_check/tpcds_sf100/constraints/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/constraints/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/constraints/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/constraints/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_rf_prune.py b/regression-test/suites/shape_check/tpcds_sf100/ddl/gen_rf_prune.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_rf_prune.py rename to 
regression-test/suites/shape_check/tpcds_sf100/ddl/gen_rf_prune.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf100/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf100/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl b/regression-test/suites/shape_check/tpcds_sf100/ddl/rf_prune.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/rf_prune.tmpl rename to regression-test/suites/shape_check/tpcds_sf100/ddl/rf_prune.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf100/ddl/shape.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/ddl/shape.tmpl rename to regression-test/suites/shape_check/tpcds_sf100/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/load.groovy b/regression-test/suites/shape_check/tpcds_sf100/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/load.groovy rename to regression-test/suites/shape_check/tpcds_sf100/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query10.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query15.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query2.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query3.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query34.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query39.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query49.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query53.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query58.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query68.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query72.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query77.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query87.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query91.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query96.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/noStatsRfPrune/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/noStatsRfPrune/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query16.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query20.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query25.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query35.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query4.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query44.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query54.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query59.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query63.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query73.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query78.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query82.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query92.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query97.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/no_stats_shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/no_stats_shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query17.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/rf_prune/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query22.groovy 
similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query27.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query32.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query37.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query42.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query47.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query52.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query57.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query62.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query67.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query72.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query77.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query82.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query87.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query92.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query97.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/rf_prune/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/rf_prune/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/rf_prune/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query12.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query18.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query28.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query33.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query39.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query49.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query54.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query6.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query67.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query7.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query75.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query80.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy 
b/regression-test/suites/shape_check/tpcds_sf100/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query90.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query96.groovy rename to 
regression-test/suites/shape_check/tpcds_sf100/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/tpcds_sf100_stats.groovy b/regression-test/suites/shape_check/tpcds_sf100/shape/tpcds_sf100_stats.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf100_p0/shape/tpcds_sf100_stats.groovy rename to regression-test/suites/shape_check/tpcds_sf100/shape/tpcds_sf100_stats.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/bs_downgrade_shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/bs_downgrade_shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf1000/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf1000/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf1000/ddl/shape.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/ddl/shape.tmpl rename to regression-test/suites/shape_check/tpcds_sf1000/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.groovy b/regression-test/suites/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/eliminate_empty/query10_empty.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/eliminate_empty/query10_empty.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query1.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query1.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query10.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query10.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query10.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query11.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query11.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query12.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query12.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query13.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query13.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query14.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query14.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query15.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query15.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query16.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query16.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query16.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query17.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query17.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query18.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query18.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query19.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query19.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query2.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query2.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query20.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query20.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query21.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query21.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query21.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query22.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query22.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query23.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query23.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query24.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query24.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query25.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query25.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query26.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query26.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query27.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query27.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query27.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query28.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query28.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query29.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query29.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query3.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query3.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query30.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query30.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query31.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query31.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query32.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query32.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query32.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query34.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query34.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query36.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query36.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query37.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query37.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query38.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query38.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query39.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query39.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query4.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query4.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query4.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query40.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query40.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query41.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query41.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query42.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query42.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query43.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query43.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query44.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query44.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query45.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query45.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query45.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query46.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query46.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query47.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query47.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query48.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query48.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query49.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query49.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query5.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query5.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query50.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query50.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query50.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query51.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query51.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query52.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query52.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query53.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query53.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query54.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query54.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query55.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query55.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query56.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query56.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query56.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query57.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query57.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query58.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query58.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query59.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query59.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query6.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query6.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query60.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query60.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query61.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query61.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query61.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query62.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query62.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query63.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query63.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query64.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query64.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query65.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query65.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query66.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query66.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query67.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query67.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query67.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query68.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query68.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query69.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query69.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query7.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query7.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query70.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query70.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query71.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query71.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query72.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query72.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query72.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query73.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query73.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query74.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query74.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query75.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query75.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query76.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query76.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query77.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query77.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query78.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query78.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query78.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query79.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query79.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query8.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query8.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query80.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query80.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query81.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query81.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query82.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query82.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query84.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query84.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query84.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query85.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query85.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query86.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query86.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query87.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query87.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query88.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query88.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query89.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query89.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query9.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query9.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query9.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query90.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query90.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query91.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query91.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query92.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query92.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query93.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query93.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query94.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query94.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query95.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query95.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/hint/query95.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query96.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query96.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query97.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query97.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query98.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query98.groovy diff --git a/regression-test/suites/nereids_hint_tpcds_p0/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf1000/hint/query99.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpcds_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/hint/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/load.groovy b/regression-test/suites/shape_check/tpcds_sf1000/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/load.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query1.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query11.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query15.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query16.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query21.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query25.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query30.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query31.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query36.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query37.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query41.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query46.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query47.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query51.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query57.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query57.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query62.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query67.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query72.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query72.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query78.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query82.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy 
b/regression-test/suites/shape_check/tpcds_sf1000/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query88.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query88.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query92.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query93.groovy rename to 
regression-test/suites/shape_check/tpcds_sf1000/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query98.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf1000/shape/query99.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf1000_p0/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf1000/shape/query99.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/gen_shape.py b/regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/gen_shape.py similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/gen_shape.py rename to regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/gen_shape.py diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/shape.tmpl b/regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/shape.tmpl similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/ddl/shape.tmpl rename to regression-test/suites/shape_check/tpcds_sf10t_orc/ddl/shape.tmpl diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/load.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/load.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/load.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query1.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query1.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query1.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query1.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query10.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query10.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query10.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query10.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query11.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query11.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query11.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query11.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query12.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query12.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query12.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query12.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query13.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query13.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query13.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query13.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query14.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query14.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query14.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query14.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query15.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query15.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query15.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query15.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query16.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query16.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query16.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query16.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query17.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query17.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query17.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query17.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query18.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query18.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query18.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query18.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query19.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query19.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query19.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query19.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query2.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query2.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query2.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query2.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query20.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query20.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query20.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query20.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query21.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query21.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query21.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query21.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query22.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query22.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query22.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query22.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query23.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query23.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query23.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query23.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query24.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query24.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query24.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query24.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query25.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query25.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query25.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query25.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query26.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query26.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query26.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query26.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query27.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query27.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query27.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query27.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query28.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query28.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query28.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query28.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query29.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query29.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query29.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query29.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query3.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query3.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query3.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query3.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query30.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query30.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query30.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query30.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query31.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query31.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query31.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query31.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query32.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query32.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query32.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query32.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query33.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query33.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query33.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query33.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query34.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query34.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query34.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query34.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query35.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query35.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query35.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query35.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query36.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query36.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query36.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query36.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query37.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query37.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query37.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query37.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query38.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query38.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query38.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query38.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query39.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query39.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query39.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query39.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query4.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query4.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query4.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query4.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query40.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query40.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query40.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query40.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query41.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query41.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query41.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query41.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query42.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query42.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query42.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query42.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query43.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query43.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query43.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query43.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query44.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query44.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query44.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query44.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query45.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query45.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query45.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query45.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query46.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query46.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query46.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query46.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query47.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query47.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query47.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query47.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query48.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query48.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query48.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query48.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query49.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query49.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query49.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query49.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query5.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query5.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query5.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query5.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query50.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query50.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query50.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query50.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query51.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query51.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query51.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query51.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query52.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query52.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query52.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query52.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query53.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query53.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query53.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query53.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query54.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query54.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query54.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query54.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query55.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query55.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query55.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query55.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query56.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query56.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query56.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query56.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query57.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query57.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query57.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query57.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query58.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query58.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query58.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query58.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query59.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query59.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query59.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query59.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query6.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query6.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query6.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query6.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query60.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query60.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query60.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query60.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query61.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query61.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query61.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query61.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query62.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query62.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query62.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query62.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query63.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query63.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query63.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query63.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query64.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query64.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query64.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query64.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query65.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query65.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query65.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query65.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query66.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query66.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query66.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query66.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query67.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query67.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query67.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query67.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query68.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query68.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query68.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query68.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query69.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query69.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query69.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query69.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query7.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query7.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query7.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query7.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query70.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query70.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query70.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query70.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query71.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query71.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query71.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query71.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query72.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query72.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query72.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query72.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query73.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query73.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query73.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query73.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query74.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query74.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query74.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query74.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query75.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query75.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query75.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query75.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query76.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query76.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query76.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query76.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query77.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query77.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query77.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query77.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query78.groovy 
b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query78.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query78.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query78.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query79.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query79.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query79.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query79.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query8.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query8.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query8.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query8.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query80.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query80.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query80.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query80.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query81.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query81.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query81.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query81.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query82.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query82.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query82.groovy rename to 
regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query82.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query83.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query83.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query83.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query83.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query84.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query84.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query84.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query84.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query85.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query85.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query85.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query85.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query86.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query86.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query86.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query86.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query87.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query87.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query87.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query87.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query88.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query88.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query88.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query88.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query89.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query89.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query89.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query89.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query9.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query9.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query9.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query9.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query90.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query90.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query90.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query90.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query91.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query91.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query91.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query91.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query92.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query92.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query92.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query92.groovy diff --git 
a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query93.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query93.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query93.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query93.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query94.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query94.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query94.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query94.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query95.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query95.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query95.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query95.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query96.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query96.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query96.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query96.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query97.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query97.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query97.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query97.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query98.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query98.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query98.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query98.groovy diff --git a/regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query99.groovy b/regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query99.groovy similarity index 100% rename from regression-test/suites/nereids_tpcds_shape_sf10t_orc/shape/query99.groovy rename to regression-test/suites/shape_check/tpcds_sf10t_orc/shape/query99.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q10.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q10.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q11.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q11.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q12.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q12.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q13.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q13.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q14.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q14.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/hint/q14.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q15.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q15.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q17.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q17.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q19.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q19.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q3.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q3.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q4.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q4.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q5.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q5.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q7.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/hint/q7.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q7.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q8.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q8.groovy diff --git a/regression-test/suites/nereids_hint_tpch_p0/shape/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/hint/q9.groovy similarity index 100% rename from regression-test/suites/nereids_hint_tpch_p0/shape/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/hint/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/load.groovy b/regression-test/suites/shape_check/tpch_sf1000/load.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/load.groovy rename to regression-test/suites/shape_check/tpch_sf1000/load.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q16.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20-rewrite.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q4.groovy diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q7.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/nostats_rf_prune/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/nostats_rf_prune/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q1.groovy similarity index 100% rename from 
regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q15.groovy similarity 
index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q2.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q4.groovy diff --git 
a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q7.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/rf_prune/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/rf_prune/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/rf_prune/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.groovy b/regression-test/suites/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/runtime_filter/test_pushdown_setop.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/runtime_filter/test_pushdown_setop.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q1.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q14.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q2.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q4.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q7.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape/q9.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape/q9.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q1.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q1.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q1.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q10.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q10.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q10.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q11.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q11.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q11.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q12.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q12.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q12.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q13.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q13.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q13.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q14.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q14.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q14.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q15.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q15.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q15.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q16.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q16.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q16.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q17.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q17.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q17.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q18.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q18.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q18.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q19.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q19.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q19.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q2.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q2.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q2.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20-rewrite.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20-rewrite.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q20.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q20.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q21.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q21.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q21.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q22.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q22.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q22.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q3.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q3.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q3.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy 
b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q4.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q4.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q4.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q5.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q5.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q5.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q6.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q6.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q6.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q7.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q7.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q7.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q8.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q8.groovy rename to regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q8.groovy diff --git a/regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy b/regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q9.groovy similarity index 100% rename from regression-test/suites/nereids_tpch_shape_sf1000_p0/shape_no_stats/q9.groovy rename to 
regression-test/suites/shape_check/tpch_sf1000/shape_no_stats/q9.groovy From 7209101c2486b1a016b11676ef52ebe43ed5b5ea Mon Sep 17 00:00:00 2001 From: Gavin Chou Date: Mon, 23 Dec 2024 21:21:23 +0800 Subject: [PATCH 67/82] [opt](log) Optimize compaction log to match output version (#44375) --- be/src/cloud/cloud_base_compaction.cpp | 7 ++++--- be/src/cloud/cloud_cumulative_compaction.cpp | 8 +++++--- be/src/cloud/cloud_tablet.cpp | 3 +++ 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/be/src/cloud/cloud_base_compaction.cpp b/be/src/cloud/cloud_base_compaction.cpp index 9742e57dcf9d34..d053214e964a78 100644 --- a/be/src/cloud/cloud_base_compaction.cpp +++ b/be/src/cloud/cloud_base_compaction.cpp @@ -268,8 +268,9 @@ Status CloudBaseCompaction::execute_compact() { << ", output_version=" << _output_version; return res; } - LOG_INFO("finish CloudBaseCompaction, tablet_id={}, cost={}ms", _tablet->tablet_id(), - duration_cast(steady_clock::now() - start).count()) + LOG_INFO("finish CloudBaseCompaction, tablet_id={}, cost={}ms range=[{}-{}]", + _tablet->tablet_id(), duration_cast(steady_clock::now() - start).count(), + _input_rowsets.front()->start_version(), _input_rowsets.back()->end_version()) .tag("job_id", _uuid) .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) @@ -343,7 +344,7 @@ Status CloudBaseCompaction::modify_rowsets() { .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) .tag("input_segments", _input_segments) - .tag("update_bitmap_size", output_rowset_delete_bitmap->delete_bitmap.size()); + .tag("num_output_delete_bitmap", output_rowset_delete_bitmap->delete_bitmap.size()); compaction_job->set_delete_bitmap_lock_initiator(initiator); } diff --git a/be/src/cloud/cloud_cumulative_compaction.cpp b/be/src/cloud/cloud_cumulative_compaction.cpp index 1acf8efe32e62b..c7a82b322fb82a 100644 --- a/be/src/cloud/cloud_cumulative_compaction.cpp +++ b/be/src/cloud/cloud_cumulative_compaction.cpp @@ 
-204,8 +204,9 @@ Status CloudCumulativeCompaction::execute_compact() { << ", output_version=" << _output_version; return res; } - LOG_INFO("finish CloudCumulativeCompaction, tablet_id={}, cost={}ms", _tablet->tablet_id(), - duration_cast(steady_clock::now() - start).count()) + LOG_INFO("finish CloudCumulativeCompaction, tablet_id={}, cost={}ms, range=[{}-{}]", + _tablet->tablet_id(), duration_cast(steady_clock::now() - start).count(), + _input_rowsets.front()->start_version(), _input_rowsets.back()->end_version()) .tag("job_id", _uuid) .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) @@ -299,7 +300,8 @@ Status CloudCumulativeCompaction::modify_rowsets() { .tag("input_rowsets", _input_rowsets.size()) .tag("input_rows", _input_row_num) .tag("input_segments", _input_segments) - .tag("update_bitmap_size", output_rowset_delete_bitmap->delete_bitmap.size()); + .tag("number_output_delete_bitmap", + output_rowset_delete_bitmap->delete_bitmap.size()); compaction_job->set_delete_bitmap_lock_initiator(initiator); } diff --git a/be/src/cloud/cloud_tablet.cpp b/be/src/cloud/cloud_tablet.cpp index 4e351f7cfa5110..31b7c6dd5dc8cd 100644 --- a/be/src/cloud/cloud_tablet.cpp +++ b/be/src/cloud/cloud_tablet.cpp @@ -409,6 +409,9 @@ uint64_t CloudTablet::delete_expired_stale_rowsets() { auto rs_it = _stale_rs_version_map.find(v_ts->version()); if (rs_it != _stale_rs_version_map.end()) { expired_rowsets.push_back(rs_it->second); + LOG(INFO) << "erase stale rowset, tablet_id=" << tablet_id() + << " rowset_id=" << rs_it->second->rowset_id().to_string() + << " version=" << rs_it->first.to_string(); _stale_rs_version_map.erase(rs_it); } else { LOG(WARNING) << "cannot find stale rowset " << v_ts->version() << " in tablet " From 583e6cde94d8bd77634bf95aa0275872b13c93ca Mon Sep 17 00:00:00 2001 From: yujun Date: Tue, 24 Dec 2024 10:23:37 +0800 Subject: [PATCH 68/82] [test](nereids) add arthmetic comparison ut (#45690) ### What problem does this PR solve? 
Issue Number: close #xxx Related PR: #xxx Problem Summary: --- .../SimplifyArithmeticComparisonRuleTest.java | 193 +++++++++++++----- 1 file changed, 147 insertions(+), 46 deletions(-) diff --git a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java index 4d932187611136..32857d4f4ae8fa 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/nereids/rules/expression/rules/SimplifyArithmeticComparisonRuleTest.java @@ -21,70 +21,171 @@ import org.apache.doris.nereids.rules.expression.ExpressionRewriteTestHelper; import org.apache.doris.nereids.rules.expression.ExpressionRuleExecutor; import org.apache.doris.nereids.trees.expressions.Expression; -import org.apache.doris.nereids.trees.expressions.Slot; -import org.apache.doris.nereids.trees.expressions.SlotReference; -import org.apache.doris.nereids.types.IntegerType; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import java.util.HashMap; -import java.util.Map; - class SimplifyArithmeticComparisonRuleTest extends ExpressionRewriteTestHelper { @Test - public void testProcess() { - Map nameToSlot = new HashMap<>(); - nameToSlot.put("a", new SlotReference("a", IntegerType.INSTANCE)); + public void testNumeric() { executor = new ExpressionRuleExecutor(ImmutableList.of( ExpressionRewrite.bottomUp(SimplifyArithmeticComparisonRule.INSTANCE) )); - assertRewriteAfterSimplify("a + 1 > 1", "a > cast((1 - 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("a - 1 > 1", "a > cast((1 + 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("a / -2 > 1", "cast((1 * -2) as INT) > a", nameToSlot); + + // test tinyint type 
+ assertRewriteAfterSimplify("TA + 2 > 1", "cast(TA as SMALLINT) > (1 - 2)"); + assertRewriteAfterSimplify("TA - 2 > 1", "cast(TA as SMALLINT) > (1 + 2)"); + assertRewriteAfterSimplify("1 + TA > 2", "cast(TA as SMALLINT) > (2 - 1)"); + assertRewriteAfterSimplify("-1 + TA > 2", "cast(TA as SMALLINT) > (2 - (-1))"); + assertRewriteAfterSimplify("1 - TA > 2", "cast(TA as SMALLINT) < (1 - 2))"); + assertRewriteAfterSimplify("-1 - TA > 2", "cast(TA as SMALLINT) < ((-1) - 2)"); + assertRewriteAfterSimplify("2 * TA > 1", "((2 * TA) > 1)"); + assertRewriteAfterSimplify("-2 * TA > 1", "((-2 * TA) > 1)"); + assertRewriteAfterSimplify("2 / TA > 1", "((2 / TA) > 1)"); + assertRewriteAfterSimplify("-2 / TA > 1", "((-2 / TA) > 1)"); + assertRewriteAfterSimplify("TA * 2 > 1", "((TA * 2) > 1)"); + assertRewriteAfterSimplify("TA * (-2) > 1", "((TA * (-2)) > 1)"); + assertRewriteAfterSimplify("TA / 2 > 1", "cast(TA as SMALLINT) > (1 * 2)"); + assertRewriteAfterSimplify("TA / -2 > 1", "(1 * -2) > cast(TA as SMALLINT)"); + + // test integer type + assertRewriteAfterSimplify("IA + 2 > 1", "IA > cast((1 - 2) as INT)"); + assertRewriteAfterSimplify("IA - 2 > 1", "IA > cast((1 + 2) as INT)"); + assertRewriteAfterSimplify("1 + IA > 2", "IA > cast((2 - 1) as INT)"); + assertRewriteAfterSimplify("-1 + IA > 2", "IA > cast((2 - (-1)) as INT)"); + assertRewriteAfterSimplify("1 - IA > 2", "IA < cast((1 - 2) as INT)"); + assertRewriteAfterSimplify("-1 - IA > 2", "IA < cast(((-1) - 2) as INT)"); + assertRewriteAfterSimplify("2 * IA > 1", "((2 * IA) > 1)"); + assertRewriteAfterSimplify("-2 * IA > 1", "((-2 * IA) > 1)"); + assertRewriteAfterSimplify("2 / IA > 1", "((2 / IA) > 1)"); + assertRewriteAfterSimplify("-2 / IA > 1", "((-2 / IA) > 1)"); + assertRewriteAfterSimplify("IA * 2 > 1", "((IA * 2) > 1)"); + assertRewriteAfterSimplify("IA * (-2) > 1", "((IA * (-2)) > 1)"); + assertRewriteAfterSimplify("IA / 2 > 1", "(IA > cast((1 * 2) as INT))"); + assertRewriteAfterSimplify("IA / -2 > 1", "cast((1 * 
-2) as INT) > IA"); // test integer type - assertRewriteAfterSimplify("1 + a > 2", "a > cast((2 - 1) as INT)", nameToSlot); - assertRewriteAfterSimplify("-1 + a > 2", "a > cast((2 - (-1)) as INT)", nameToSlot); - assertRewriteAfterSimplify("1 - a > 2", "a < cast((1 - 2) as INT)", nameToSlot); - assertRewriteAfterSimplify("-1 - a > 2", "a < cast(((-1) - 2) as INT)", nameToSlot); - assertRewriteAfterSimplify("2 * a > 1", "((2 * a) > 1)", nameToSlot); - assertRewriteAfterSimplify("-2 * a > 1", "((-2 * a) > 1)", nameToSlot); - assertRewriteAfterSimplify("2 / a > 1", "((2 / a) > 1)", nameToSlot); - assertRewriteAfterSimplify("-2 / a > 1", "((-2 / a) > 1)", nameToSlot); - assertRewriteAfterSimplify("a * 2 > 1", "((a * 2) > 1)", nameToSlot); - assertRewriteAfterSimplify("a * (-2) > 1", "((a * (-2)) > 1)", nameToSlot); - assertRewriteAfterSimplify("a / 2 > 1", "(a > cast((1 * 2) as INT))", nameToSlot); + assertRewriteAfterSimplify("TA + 2 > 200", "cast(TA as INT) > (200 - 2)"); + assertRewriteAfterSimplify("TA - 2 > 200", "cast(TA as INT) > (200 + 2)"); + assertRewriteAfterSimplify("1 + TA > 200", "cast(TA as INT) > (200 - 1)"); + assertRewriteAfterSimplify("-1 + TA > 200", "cast(TA as INT) > (200 - (-1))"); + assertRewriteAfterSimplify("1 - TA > 200", "cast(TA as INT) < (1 - 200))"); + assertRewriteAfterSimplify("-1 - TA > 200", "cast(TA as INT) < ((-1) - 200)"); + assertRewriteAfterSimplify("2 * TA > 200", "((2 * TA) > 200)"); + assertRewriteAfterSimplify("-2 * TA > 200", "((-2 * TA) > 200)"); + assertRewriteAfterSimplify("2 / TA > 200", "((2 / TA) > 200)"); + assertRewriteAfterSimplify("-2 / TA > 200", "((-2 / TA) > 200)"); + assertRewriteAfterSimplify("TA * 2 > 200", "((TA * 2) > 200)"); + assertRewriteAfterSimplify("TA * (-2) > 200", "((TA * (-2)) > 200)"); + assertRewriteAfterSimplify("TA / 2 > 200", "cast(TA as INT) > (200 * 2)"); + assertRewriteAfterSimplify("TA / -2 > 200", "(200 * -2) > cast(TA as INT)"); // test decimal type - assertRewriteAfterSimplify("1.1 + a 
> 2.22", "(cast(a as DECIMALV3(12, 2)) > cast((2.22 - 1.1) as DECIMALV3(12, 2)))", nameToSlot); - assertRewriteAfterSimplify("-1.1 + a > 2.22", "(cast(a as DECIMALV3(12, 2)) > cast((2.22 - (-1.1)) as DECIMALV3(12, 2)))", nameToSlot); - assertRewriteAfterSimplify("1.1 - a > 2.22", "(cast(a as DECIMALV3(11, 1)) < cast((1.1 - 2.22) as DECIMALV3(11, 1)))", nameToSlot); - assertRewriteAfterSimplify("-1.1 - a > 2.22", "(cast(a as DECIMALV3(11, 1)) < cast((-1.1 - 2.22) as DECIMALV3(11, 1)))", nameToSlot); - assertRewriteAfterSimplify("2.22 * a > 1.1", "((2.22 * a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("-2.22 * a > 1.1", "-2.22 * a > 1.1", nameToSlot); - assertRewriteAfterSimplify("2.22 / a > 1.1", "((2.22 / a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("-2.22 / a > 1.1", "((-2.22 / a) > 1.1)", nameToSlot); - assertRewriteAfterSimplify("a * 2.22 > 1.1", "a * 2.22 > 1.1", nameToSlot); - assertRewriteAfterSimplify("a * (-2.22) > 1.1", "a * (-2.22) > 1.1", nameToSlot); - assertRewriteAfterSimplify("a / 2.22 > 1.1", "(cast(a as DECIMALV3(13, 3)) > cast((1.1 * 2.22) as DECIMALV3(13, 3)))", nameToSlot); - assertRewriteAfterSimplify("a / (-2.22) > 1.1", "(cast((1.1 * -2.22) as DECIMALV3(13, 3)) > cast(a as DECIMALV3(13, 3)))", nameToSlot); - - // test (1 + a) can be processed - assertRewriteAfterSimplify("2 - (1 + a) > 3", "(a < ((2 - 3) - 1))", nameToSlot); - assertRewriteAfterSimplify("(1 - a) / 2 > 3", "(a < (1 - 6))", nameToSlot); - assertRewriteAfterSimplify("1 - a / 2 > 3", "(a < ((1 - 3) * 2))", nameToSlot); - assertRewriteAfterSimplify("(1 - (a + 4)) / 2 > 3", "(cast(a as BIGINT) < ((1 - 6) - 4))", nameToSlot); - assertRewriteAfterSimplify("2 * (1 + a) > 1", "(2 * (1 + a)) > 1", nameToSlot); + assertRewriteAfterSimplify("1.1 + IA > 2.22", "(cast(IA as DECIMALV3(12, 2)) > cast((2.22 - 1.1) as DECIMALV3(12, 2)))"); + assertRewriteAfterSimplify("-1.1 + IA > 2.22", "(cast(IA as DECIMALV3(12, 2)) > cast((2.22 - (-1.1)) as DECIMALV3(12, 2)))"); + 
assertRewriteAfterSimplify("1.1 - IA > 2.22", "(cast(IA as DECIMALV3(11, 1)) < cast((1.1 - 2.22) as DECIMALV3(11, 1)))"); + assertRewriteAfterSimplify("-1.1 - IA > 2.22", "(cast(IA as DECIMALV3(11, 1)) < cast((-1.1 - 2.22) as DECIMALV3(11, 1)))"); + assertRewriteAfterSimplify("2.22 * IA > 1.1", "((2.22 * IA) > 1.1)"); + assertRewriteAfterSimplify("-2.22 * IA > 1.1", "-2.22 * IA > 1.1"); + assertRewriteAfterSimplify("2.22 / IA > 1.1", "((2.22 / IA) > 1.1)"); + assertRewriteAfterSimplify("-2.22 / IA > 1.1", "((-2.22 / IA) > 1.1)"); + assertRewriteAfterSimplify("IA * 2.22 > 1.1", "IA * 2.22 > 1.1"); + assertRewriteAfterSimplify("IA * (-2.22) > 1.1", "IA * (-2.22) > 1.1"); + assertRewriteAfterSimplify("IA / 2.22 > 1.1", "(cast(IA as DECIMALV3(13, 3)) > cast((1.1 * 2.22) as DECIMALV3(13, 3)))"); + assertRewriteAfterSimplify("IA / (-2.22) > 1.1", "(cast((1.1 * -2.22) as DECIMALV3(13, 3)) > cast(IA as DECIMALV3(13, 3)))"); + + // test (1 + IA) can be processed + assertRewriteAfterSimplify("2 - (1 + IA) > 3", "(IA < ((2 - 3) - 1))"); + assertRewriteAfterSimplify("(1 - IA) / 2 > 3", "(IA < (1 - 6))"); + assertRewriteAfterSimplify("1 - IA / 2 > 3", "(IA < ((1 - 3) * 2))"); + assertRewriteAfterSimplify("(1 - (IA + 4)) / 2 > 3", "(cast(IA as BIGINT) < ((1 - 6) - 4))"); + assertRewriteAfterSimplify("2 * (1 + IA) > 1", "(2 * (1 + IA)) > 1"); + + // test (IA + IB) can be processed + assertRewriteAfterSimplify("2 - (1 + (IA + IB)) > 3", "(IA + IB) < cast(((2 - 3) - 1) as BIGINT)"); + assertRewriteAfterSimplify("(1 - (IA + IB)) / 2 > 3", "(IA + IB) < cast((1 - 6) as BIGINT)"); + assertRewriteAfterSimplify("1 - (IA + IB) / 2 > 3", "(IA + IB) < cast(((1 - 3) * 2) as BIGINT)"); + assertRewriteAfterSimplify("2 * (1 + (IA + IB)) > 1", "(2 * (1 + (IA + IB))) > 1"); + } + + @Test + public void testDateLike() { + executor = new ExpressionRuleExecutor(ImmutableList.of( + bottomUp( + SimplifyArithmeticRule.INSTANCE, + SimplifyArithmeticComparisonRule.INSTANCE + ) + )); + + // test datetimev2 
type + assertRewriteAfterTypeCoercion("years_add(AA, 1) > '2021-01-01 00:00:00'", "(years_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("years_sub(AA, 1) > '2021-01-01 00:00:00'", "(years_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_add(AA, 1) > '2021-01-01 00:00:00'", "(months_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_sub(AA, 1) > '2021-01-01 00:00:00'", "(months_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("weeks_add(AA, 1) > '2021-01-01 00:00:00'", "AA > weeks_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > weeks_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_add(AA, 1) > '2021-01-01 00:00:00'", "AA > days_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > days_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_add(AA, 1) > '2021-01-01 00:00:00'", "AA > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(AA, 1) > '2021-01-01 00:00:00'", "AA > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(AA, 1) > '2021-01-01 00:00:00'", "AA > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(AA, 1) > '2021-01-01 00:00:00'", "AA > seconds_add('2021-01-01 00:00:00', 1)"); + + assertRewriteAfterTypeCoercion("years_add(AA, 1) > '2021-01-01'", "(years_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("years_sub(AA, 1) > '2021-01-01'", "(years_sub(AA, 1) > '2021-01-01 00:00:00')"); + 
assertRewriteAfterTypeCoercion("months_add(AA, 1) > '2021-01-01'", "(months_add(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("months_sub(AA, 1) > '2021-01-01'", "(months_sub(AA, 1) > '2021-01-01 00:00:00')"); + assertRewriteAfterTypeCoercion("weeks_add(AA, 1) > '2021-01-01'", "AA > weeks_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(AA, 1) > '2021-01-01'", "AA > weeks_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_add(AA, 1) > '2021-01-01'", "AA > days_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("days_sub(AA, 1) > '2021-01-01'", "AA > days_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_add(AA, 1) > '2021-01-01'", "AA > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(AA, 1) > '2021-01-01'", "AA > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(AA, 1) > '2021-01-01'", "AA > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(AA, 1) > '2021-01-01'", "AA > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(AA, 1) > '2021-01-01'", "AA > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(AA, 1) > '2021-01-01'", "AA > seconds_add('2021-01-01 00:00:00', 1)"); + + // test date type + assertRewriteAfterTypeCoercion("years_add(CA, 1) > '2021-01-01'", "years_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("years_sub(CA, 1) > '2021-01-01'", "years_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_add(CA, 1) > '2021-01-01'", "months_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_sub(CA, 1) > '2021-01-01'", "months_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("weeks_add(CA, 1) > '2021-01-01'", "CA > weeks_sub(cast('2021-01-01' as date), 1)"); + 
assertRewriteAfterTypeCoercion("weeks_sub(CA, 1) > '2021-01-01'", "CA > weeks_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_add(CA, 1) > '2021-01-01'", "CA > days_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_sub(CA, 1) > '2021-01-01'", "CA > days_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("hours_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(CA, 1) > '2021-01-01'", "cast(CA as datetime) > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(CA, 1) > '2021-01-01'", "cast(CA as datetime) > seconds_add('2021-01-01 00:00:00', 1)"); + + assertRewriteAfterTypeCoercion("years_add(CA, 1) > '2021-01-01 00:00:00'", "years_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("years_sub(CA, 1) > '2021-01-01 00:00:00'", "years_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_add(CA, 1) > '2021-01-01 00:00:00'", "months_add(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("months_sub(CA, 1) > '2021-01-01 00:00:00'", "months_sub(CA, 1) > cast('2021-01-01' as date)"); + assertRewriteAfterTypeCoercion("weeks_add(CA, 1) > '2021-01-01 00:00:00'", "CA > weeks_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("weeks_sub(CA, 1) > '2021-01-01 00:00:00'", "CA > weeks_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_add(CA, 1) > '2021-01-01 00:00:00'", "CA > 
days_sub(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("days_sub(CA, 1) > '2021-01-01 00:00:00'", "CA > days_add(cast('2021-01-01' as date), 1)"); + assertRewriteAfterTypeCoercion("hours_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > hours_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("hours_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > hours_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > minutes_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("minutes_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > minutes_add('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_add(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > seconds_sub('2021-01-01 00:00:00', 1)"); + assertRewriteAfterTypeCoercion("seconds_sub(CA, 1) > '2021-01-01 00:00:00'", "cast(CA as datetime) > seconds_add('2021-01-01 00:00:00', 1)"); } - private void assertRewriteAfterSimplify(String expr, String expected, Map slotNameToSlot) { + private void assertRewriteAfterSimplify(String expr, String expected) { Expression needRewriteExpression = PARSER.parseExpression(expr); - if (slotNameToSlot != null) { - needRewriteExpression = replaceUnboundSlot(needRewriteExpression, slotNameToSlot); - } + needRewriteExpression = replaceUnboundSlot(needRewriteExpression, Maps.newHashMap()); Expression rewritten = executor.rewrite(needRewriteExpression, context); Expression expectedExpression = PARSER.parseExpression(expected); Assertions.assertEquals(expectedExpression.toSql(), rewritten.toSql()); From cd42ec1db8010bf391f88a09a6f22cc3c35d5d24 Mon Sep 17 00:00:00 2001 From: wangbo Date: Tue, 24 Dec 2024 10:34:41 +0800 Subject: [PATCH 69/82] [Fix] Add compile check for SchemaScanner (#45797) --- .../schema_active_queries_scanner.cpp | 6 ++-- .../schema_backend_active_tasks.cpp | 4 ++- 
...chema_catalog_meta_cache_stats_scanner.cpp | 6 ++-- .../schema_scanner/schema_columns_scanner.cpp | 12 ++++--- .../schema_file_cache_statistics.cpp | 3 +- .../schema_partitions_scanner.cpp | 6 ++-- .../schema_processlist_scanner.cpp | 3 +- .../schema_scanner/schema_routine_scanner.cpp | 6 ++-- .../schema_scanner/schema_rowsets_scanner.cpp | 32 ++++++++++--------- .../schema_table_options_scanner.cpp | 6 ++-- .../schema_table_properties_scanner.cpp | 6 ++-- .../schema_workload_group_privileges.cpp | 6 ++-- ..._workload_group_resource_usage_scanner.cpp | 4 ++- .../schema_workload_groups_scanner.cpp | 6 ++-- .../schema_workload_sched_policy_scanner.cpp | 6 ++-- 15 files changed, 70 insertions(+), 42 deletions(-) diff --git a/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp b/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp index 9805163802699a..0ccff6439b802b 100644 --- a/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_active_queries_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaActiveQueriesScanner::_s_tbls_columns = { // name, type, size {"QUERY_ID", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -92,7 +94,7 @@ Status SchemaActiveQueriesScanner::_get_active_queries_block_from_fe() { _active_query_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("active queries schema is not match for FE and BE"); } @@ -119,7 +121,7 @@ Status SchemaActiveQueriesScanner::get_next_block_internal(vectorized::Block* bl if (_active_query_block == nullptr) { RETURN_IF_ERROR(_get_active_queries_block_from_fe()); - _total_rows = _active_query_block->rows(); + _total_rows = (int)_active_query_block->rows(); } if 
(_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp b/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp index 74e95f4203217c..eb7b373c7dc7f6 100644 --- a/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp +++ b/be/src/exec/schema_scanner/schema_backend_active_tasks.cpp @@ -25,6 +25,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaBackendActiveTasksScanner::_s_tbls_columns = { // name, type, size {"BE_ID", TYPE_BIGINT, sizeof(int64_t), false}, @@ -76,7 +78,7 @@ Status SchemaBackendActiveTasksScanner::get_next_block_internal(vectorized::Bloc ExecEnv::GetInstance()->runtime_query_statistics_mgr()->get_active_be_tasks_block( _task_stats_block.get()); - _total_rows = _task_stats_block->rows(); + _total_rows = (int)_task_stats_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp b/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp index 4c067057729f21..576ae3f9e919c7 100644 --- a/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_catalog_meta_cache_stats_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaCatalogMetaCacheStatsScanner::_s_tbls_columns = { {"CATALOG_NAME", TYPE_STRING, sizeof(StringRef), true}, {"CACHE_NAME", TYPE_STRING, sizeof(StringRef), true}, @@ -86,7 +88,7 @@ Status SchemaCatalogMetaCacheStatsScanner::_get_meta_cache_from_fe() { _block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "catalog meta cache stats schema is not match for FE and BE"); @@ -115,7 
+117,7 @@ Status SchemaCatalogMetaCacheStatsScanner::get_next_block_internal(vectorized::B if (_block == nullptr) { RETURN_IF_ERROR(_get_meta_cache_from_fe()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_columns_scanner.cpp b/be/src/exec/schema_scanner/schema_columns_scanner.cpp index b60dfc3d203f89..2cc827a7b43e78 100644 --- a/be/src/exec/schema_scanner/schema_columns_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_columns_scanner.cpp @@ -30,6 +30,8 @@ #include "vec/common/string_ref.h" namespace doris { +#include "common/compile_check_begin.h" + class RuntimeState; namespace vectorized { @@ -411,7 +413,7 @@ Status SchemaColumnsScanner::_fill_block_impl(vectorized::Block* block) { { std::vector strs(columns_num); int offset_index = 0; - int cur_table_index = _table_index - _desc_result.tables_offset.size(); + int cur_table_index = int(_table_index - _desc_result.tables_offset.size()); for (int i = 0; i < columns_num; ++i) { while (_desc_result.tables_offset[offset_index] <= i) { @@ -609,14 +611,14 @@ Status SchemaColumnsScanner::_fill_block_impl(vectorized::Block* block) { // EXTRA { StringRef str = StringRef("", 0); - std::vector datas(columns_num, &str); - RETURN_IF_ERROR(fill_dest_column_for_range(block, 17, datas)); + std::vector filled_values(columns_num, &str); + RETURN_IF_ERROR(fill_dest_column_for_range(block, 17, filled_values)); } // PRIVILEGES { StringRef str = StringRef("", 0); - std::vector datas(columns_num, &str); - RETURN_IF_ERROR(fill_dest_column_for_range(block, 18, datas)); + std::vector filled_values(columns_num, &str); + RETURN_IF_ERROR(fill_dest_column_for_range(block, 18, filled_values)); } // COLUMN_COMMENT { diff --git a/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp b/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp index ecad274d218983..8a3efa0edc537c 100644 --- 
a/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp +++ b/be/src/exec/schema_scanner/schema_file_cache_statistics.cpp @@ -25,6 +25,7 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" std::vector SchemaFileCacheStatisticsScanner::_s_tbls_columns = { // name, type, size @@ -68,7 +69,7 @@ Status SchemaFileCacheStatisticsScanner::get_next_block_internal(vectorized::Blo _stats_block->reserve(_block_rows_limit); ExecEnv::GetInstance()->file_cache_factory()->get_cache_stats_block(_stats_block.get()); - _total_rows = _stats_block->rows(); + _total_rows = (int)_stats_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_partitions_scanner.cpp b/be/src/exec/schema_scanner/schema_partitions_scanner.cpp index 459715fd628943..dd7919a7fe2e30 100644 --- a/be/src/exec/schema_scanner/schema_partitions_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_partitions_scanner.cpp @@ -31,6 +31,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + class RuntimeState; namespace vectorized { class Block; @@ -138,7 +140,7 @@ Status SchemaPartitionsScanner::get_onedb_info_from_fe(int64_t dbId) { } _partitions_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -178,7 +180,7 @@ Status SchemaPartitionsScanner::get_next_block_internal(vectorized::Block* block if (_db_index < _db_result.db_ids.size()) { RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. 
- _total_rows = _partitions_block->rows(); + _total_rows = (int)_partitions_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_processlist_scanner.cpp b/be/src/exec/schema_scanner/schema_processlist_scanner.cpp index 185ef2ab44237f..92c80262963b03 100644 --- a/be/src/exec/schema_scanner/schema_processlist_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_processlist_scanner.cpp @@ -30,6 +30,7 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" std::vector SchemaProcessListScanner::_s_processlist_columns = { {"CURRENT_CONNECTED", TYPE_VARCHAR, sizeof(StringRef), false}, @@ -126,7 +127,7 @@ Status SchemaProcessListScanner::_fill_block_impl(vectorized::Block* block) { datas[row_idx] = &int_vals[row_idx]; } else if (_s_processlist_columns[col_idx].type == TYPE_DATETIMEV2) { auto* dv = reinterpret_cast*>(&int_vals[row_idx]); - if (!dv->from_date_str(column_value.data(), column_value.size(), -1, + if (!dv->from_date_str(column_value.data(), (int)column_value.size(), -1, config::allow_zero_date)) { return Status::InternalError( "process list meet invalid data, column={}, data={}, reason={}", diff --git a/be/src/exec/schema_scanner/schema_routine_scanner.cpp b/be/src/exec/schema_scanner/schema_routine_scanner.cpp index 8660d75e8a1faf..7f16c0cddba460 100644 --- a/be/src/exec/schema_scanner/schema_routine_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_routine_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaRoutinesScanner::_s_tbls_columns = { {"SPECIFIC_NAME", TYPE_VARCHAR, sizeof(StringRef), true}, {"ROUTINE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -94,7 +96,7 @@ Status SchemaRoutinesScanner::get_block_from_fe() { } _routines_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = 
result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("routine table schema is not match for FE and BE"); } @@ -121,7 +123,7 @@ Status SchemaRoutinesScanner::get_next_block_internal(vectorized::Block* block, if (_routines_block == nullptr) { RETURN_IF_ERROR(get_block_from_fe()); - _total_rows = _routines_block->rows(); + _total_rows = (int)_routines_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp b/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp index 3aa0e944a822c5..aea98bd61ac89a 100644 --- a/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_rowsets_scanner.cpp @@ -48,6 +48,8 @@ namespace vectorized { class Block; } // namespace vectorized +#include "common/compile_check_begin.h" + std::vector SchemaRowsetsScanner::_s_tbls_columns = { // name, type, size, is_null {"BACKEND_ID", TYPE_BIGINT, sizeof(int64_t), true}, @@ -132,13 +134,13 @@ Status SchemaRowsetsScanner::get_next_block_internal(vectorized::Block* block, b Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { SCOPED_TIMER(_fill_block_timer); size_t fill_rowsets_num = std::min(1000UL, rowsets_.size() - _rowsets_idx); - auto fill_idx_begin = _rowsets_idx; - auto fill_idx_end = _rowsets_idx + fill_rowsets_num; + size_t fill_idx_begin = _rowsets_idx; + size_t fill_idx_end = _rowsets_idx + fill_rowsets_num; std::vector datas(fill_rowsets_num); // BACKEND_ID { int64_t src = backend_id_; - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { datas[i - fill_idx_begin] = &src; } RETURN_IF_ERROR(fill_dest_column_for_range(block, 0, datas)); @@ -147,7 +149,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { { std::vector rowset_ids(fill_rowsets_num); std::vector strs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + 
for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; rowset_ids[i - fill_idx_begin] = rowset->rowset_id().to_string(); strs[i - fill_idx_begin] = StringRef(rowset_ids[i - fill_idx_begin].c_str(), @@ -159,7 +161,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // TABLET_ID { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->rowset_meta()->tablet_id(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -169,7 +171,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // ROWSET_NUM_ROWS { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->num_rows(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -179,7 +181,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // TXN_ID { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->txn_id(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -189,7 +191,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // NUM_SEGMENTS { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->num_segments(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -199,7 +201,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // 
START_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->start_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -209,7 +211,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // END_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->end_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -219,7 +221,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // INDEX_DISK_SIZE { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->index_disk_size(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -229,7 +231,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // DATA_DISK_SIZE { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->data_disk_size(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; @@ -239,7 +241,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // CREATION_TIME { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; int64_t creation_time = rowset->creation_time(); srcs[i - fill_idx_begin].from_unixtime(creation_time, 
TimezoneUtils::default_time_zone); @@ -250,7 +252,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // NEWEST_WRITE_TIMESTAMP { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; int64_t newest_write_timestamp = rowset->newest_write_timestamp(); srcs[i - fill_idx_begin].from_unixtime(newest_write_timestamp, @@ -262,7 +264,7 @@ Status SchemaRowsetsScanner::_fill_block_impl(vectorized::Block* block) { // SCHEMA_VERSION { std::vector srcs(fill_rowsets_num); - for (int i = fill_idx_begin; i < fill_idx_end; ++i) { + for (size_t i = fill_idx_begin; i < fill_idx_end; ++i) { RowsetSharedPtr rowset = rowsets_[i]; srcs[i - fill_idx_begin] = rowset->tablet_schema()->schema_version(); datas[i - fill_idx_begin] = srcs.data() + i - fill_idx_begin; diff --git a/be/src/exec/schema_scanner/schema_table_options_scanner.cpp b/be/src/exec/schema_scanner/schema_table_options_scanner.cpp index bb778996a83f04..fd9d17c8b93cf2 100644 --- a/be/src/exec/schema_scanner/schema_table_options_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_table_options_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaTableOptionsScanner::_s_tbls_columns = { {"TABLE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, {"TABLE_SCHEMA", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -110,7 +112,7 @@ Status SchemaTableOptionsScanner::get_onedb_info_from_fe(int64_t dbId) { } _tableoptions_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -150,7 +152,7 @@ Status 
SchemaTableOptionsScanner::get_next_block_internal(vectorized::Block* blo if (_db_index < _db_result.db_ids.size()) { RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. - _total_rows = _tableoptions_block->rows(); + _total_rows = (int)_tableoptions_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp b/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp index 8d6a26a552f707..682560372b97c7 100644 --- a/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_table_properties_scanner.cpp @@ -27,6 +27,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaTablePropertiesScanner::_s_tbls_columns = { {"TABLE_CATALOG", TYPE_VARCHAR, sizeof(StringRef), true}, {"TABLE_SCHEMA", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -108,7 +110,7 @@ Status SchemaTablePropertiesScanner::get_onedb_info_from_fe(int64_t dbId) { } _tableproperties_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError("table options schema is not match for FE and BE"); } @@ -148,7 +150,7 @@ Status SchemaTablePropertiesScanner::get_next_block_internal(vectorized::Block* if (_db_index < _db_result.db_ids.size()) { RETURN_IF_ERROR(get_onedb_info_from_fe(_db_result.db_ids[_db_index])); _row_idx = 0; // reset row index so that it start filling for next block. 
- _total_rows = _tableproperties_block->rows(); + _total_rows = (int)_tableproperties_block->rows(); _db_index++; } } diff --git a/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp b/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp index a91a28322ecd76..bdf306ef7d94ad 100644 --- a/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp +++ b/be/src/exec/schema_scanner/schema_workload_group_privileges.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadGroupPrivilegesScanner::_s_tbls_columns = { {"GRANTEE", TYPE_VARCHAR, sizeof(StringRef), true}, {"WORKLOAD_GROUP_NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -83,7 +85,7 @@ Status SchemaWorkloadGroupPrivilegesScanner::_get_workload_group_privs_block_fro } if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload group privileges schema is not match for FE and BE"); @@ -116,7 +118,7 @@ Status SchemaWorkloadGroupPrivilegesScanner::get_next_block_internal(vectorized: if (_workload_groups_privs_block == nullptr) { RETURN_IF_ERROR(_get_workload_group_privs_block_from_fe()); - _total_rows = _workload_groups_privs_block->rows(); + _total_rows = (int)_workload_groups_privs_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp index ca339044e98a5f..805bf12cc38ae6 100644 --- a/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_workload_group_resource_usage_scanner.cpp @@ -28,6 +28,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector 
SchemaBackendWorkloadGroupResourceUsage::_s_tbls_columns = { // name, type, size {"BE_ID", TYPE_BIGINT, sizeof(int64_t), false}, @@ -70,7 +72,7 @@ Status SchemaBackendWorkloadGroupResourceUsage::get_next_block_internal(vectoriz } ExecEnv::GetInstance()->workload_group_mgr()->get_wg_resource_usage(_block.get()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp index 481360eee90557..bc5fb61669c525 100644 --- a/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_workload_groups_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadGroupsScanner::_s_tbls_columns = { {"ID", TYPE_BIGINT, sizeof(int64_t), true}, {"NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -98,7 +100,7 @@ Status SchemaWorkloadGroupsScanner::_get_workload_groups_block_from_fe() { _workload_groups_block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload groups schema is not match for FE and BE"); @@ -127,7 +129,7 @@ Status SchemaWorkloadGroupsScanner::get_next_block_internal(vectorized::Block* b if (_workload_groups_block == nullptr) { RETURN_IF_ERROR(_get_workload_groups_block_from_fe()); - _total_rows = _workload_groups_block->rows(); + _total_rows = (int)_workload_groups_block->rows(); } if (_row_idx == _total_rows) { diff --git a/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp b/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp index 5c6a6f70a88a86..fa1c671f5eeea0 100644 --- 
a/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp +++ b/be/src/exec/schema_scanner/schema_workload_sched_policy_scanner.cpp @@ -26,6 +26,8 @@ #include "vec/data_types/data_type_factory.hpp" namespace doris { +#include "common/compile_check_begin.h" + std::vector SchemaWorkloadSchedulePolicyScanner::_s_tbls_columns = { {"ID", TYPE_BIGINT, sizeof(int64_t), true}, {"NAME", TYPE_VARCHAR, sizeof(StringRef), true}, @@ -89,7 +91,7 @@ Status SchemaWorkloadSchedulePolicyScanner::_get_workload_schedule_policy_block_ _block->reserve(_block_rows_limit); if (result_data.size() > 0) { - int col_size = result_data[0].column_value.size(); + auto col_size = result_data[0].column_value.size(); if (col_size != _s_tbls_columns.size()) { return Status::InternalError( "workload policy schema is not match for FE and BE"); @@ -118,7 +120,7 @@ Status SchemaWorkloadSchedulePolicyScanner::get_next_block_internal(vectorized:: if (_block == nullptr) { RETURN_IF_ERROR(_get_workload_schedule_policy_block_from_fe()); - _total_rows = _block->rows(); + _total_rows = (int)_block->rows(); } if (_row_idx == _total_rows) { From a16b6829e6afed3efe2c503cca04340529ce0e98 Mon Sep 17 00:00:00 2001 From: Tiewei Fang Date: Tue, 24 Dec 2024 10:40:58 +0800 Subject: [PATCH 70/82] [fix](TrinoConnector) fix the error message when querying a not-existent table with TrinoConnector (#45799) Problem Summary: fix the error message when querying a not-existent table with TrinoConnector --- .../datasource/trinoconnector/TrinoConnectorExternalTable.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java index 27f9b8086a9cef..007ad864da3af8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/trinoconnector/TrinoConnectorExternalTable.java @@ -108,7 +108,8 @@ public Optional initSchema() { qualifiedTable.asSchemaTableName(), Optional.empty(), Optional.empty())); } if (!connectorTableHandle.isPresent()) { - throw new RuntimeException(String.format("Table does not exist: %s.%s.%s", qualifiedTable)); + throw new RuntimeException(String.format("Table does not exist: %s.%s.%s", trinoConnectorCatalog.getName(), + dbName, name)); } // 4. Get ColumnHandle From 9b0d962e6e9d9b89a06ac1f491c5bb3a8d071671 Mon Sep 17 00:00:00 2001 From: minghong Date: Tue, 24 Dec 2024 11:24:34 +0800 Subject: [PATCH 71/82] [feat](nereids) when check shape failed, print all plan with memo (#45627) ### What problem does this PR solve? when check plan shape failed, print all plan and memo into log for debug --- .../apache/doris/nereids/NereidsPlanner.java | 3 ++ .../doris/regression/suite/Suite.groovy | 28 +++++++++++++------ 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java index 6b1c1dd6734435..b6b09348046cea 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/NereidsPlanner.java @@ -713,6 +713,9 @@ public String getExplainString(ExplainOptions explainOptions) { + "========== OPTIMIZED PLAN " + getTimeMetricString(SummaryProfile::getPrettyNereidsOptimizeTime) + " ==========\n" + optimizedPlan.treeString() + "\n\n"; + if (cascadesContext != null && cascadesContext.getMemo() != null) { + plan += "========== MEMO " + cascadesContext.getMemo().toString() + "\n\n"; + } if (distributedPlans != null && !distributedPlans.isEmpty()) { plan += "========== DISTRIBUTED PLAN " diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy 
b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy index f5d811514b375d..16cc6bbd21b003 100644 --- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy +++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy @@ -71,6 +71,7 @@ import java.util.concurrent.Future import java.util.concurrent.ThreadFactory import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicBoolean +import java.util.regex.Pattern import java.util.stream.Collectors import java.util.stream.LongStream import static org.apache.doris.regression.util.DataUtils.sortByToString @@ -263,7 +264,7 @@ class Suite implements GroovyInterceptable { } public T connect(String user = context.config.jdbcUser, String password = context.config.jdbcPassword, - String url = context.config.jdbcUrl, Closure actionSupplier) { + String url = context.config.jdbcUrl, Closure actionSupplier) { return context.connect(user, password, url, actionSupplier) } @@ -640,7 +641,7 @@ class Suite implements GroovyInterceptable { } long getTableVersion(long dbId, String tableName) { - def result = sql_return_maparray """show proc '/dbs/${dbId}'""" + def result = sql_return_maparray """show proc '/dbs/${dbId}'""" for (def res : result) { if(res.TableName.equals(tableName)) { log.info(res.toString()) @@ -989,7 +990,7 @@ class Suite implements GroovyInterceptable { if (exitcode != 0) { staticLogger.info("exit code: ${exitcode}, output\n: ${proc.text}") if (mustSuc == true) { - Assert.assertEquals(0, exitcode) + Assert.assertEquals(0, exitcode) } } } catch (IOException e) { @@ -1119,7 +1120,7 @@ class Suite implements GroovyInterceptable { Connection getTargetConnection() { return context.getTargetConnection(this) } - + boolean deleteFile(String filePath) { def file = new File(filePath) file.delete() @@ -1142,7 +1143,7 @@ class Suite implements GroovyInterceptable { ) DISTRIBUTED BY HASH(id) BUCKETS 1 
PROPERTIES ( - "replication_num" = "${backends.size()}" + "replication_num" = "${backends.size()}" ) """ @@ -1314,13 +1315,24 @@ class Suite implements GroovyInterceptable { throw new IllegalStateException("Check tag '${tag}' failed, sql:\n${arg}", t) } if (errorMsg != null) { + def allPlan = "" + if (arg instanceof String) { + def query = (String) arg; + def pattern = Pattern.compile("^\\s*explain\\s+shape\\s*plan\\s*", Pattern.MULTILINE) + if (query =~ pattern) { + def physical = query.replaceAll(pattern, "explain all plan ") + try { + allPlan = JdbcUtils.executeToStringList(context.getConnection(), physical)[0].join('\n') + } catch (Throwable ignore) {} + } + } String csvRealResult = realResults.stream() - .map {row -> OutputUtils.toCsvString(row)} - .collect(Collectors.joining("\n")) + .map { row -> OutputUtils.toCsvString(row) } + .collect(Collectors.joining("\n")) def outputFilePath = context.outputFile.getCanonicalPath().substring(context.config.dataPath.length() + 1) def line = expectCsvResults.currentLine() logger.warn("expect results in file: ${outputFilePath}, line: ${line}\nrealResults:\n" + csvRealResult) - throw new IllegalStateException("Check tag '${tag}' failed:\n${errorMsg}\n\nsql:\n${arg}") + throw new IllegalStateException("Check tag '${tag}' failed:\n${errorMsg}\n\nsql:\n${arg}\n\n${allPlan}") } } } From 6d6ff2d56dca970756b9b78778cf80c772b35f2d Mon Sep 17 00:00:00 2001 From: James Date: Tue, 24 Dec 2024 11:58:51 +0800 Subject: [PATCH 72/82] [feature](mtmv)Support iceberg mtmv query. (#45659) ### What problem does this PR solve? 1. Implement MvccTable interface for IcebertExternalTable 2. IcebergExternalTable overrides the methods in ExternalTable and supports partition pruning 3. Add snapshot cache in IcebergMetadataCache to store IcebergExternalTable partition infos. 
Issue Number: close #xxx Related PR: #xxx Problem Summary: ### Release note None --- .../doris/catalog/RangePartitionItem.java | 7 +- .../datasource/hive/HMSExternalTable.java | 2 +- .../iceberg/IcebergExternalTable.java | 171 +++++++++++------- .../iceberg/IcebergMetadataCache.java | 42 ++++- .../iceberg/IcebergMvccSnapshot.java | 32 ++++ .../iceberg/IcebergSchemaCacheKey.java | 55 ++++++ .../iceberg/IcebergSchemaCacheValue.java | 15 +- .../datasource/iceberg/IcebergSnapshot.java | 36 ++++ .../iceberg/IcebergSnapshotCacheValue.java | 37 ++++ .../datasource/iceberg/IcebergUtils.java | 14 +- .../iceberg/IcebergExternalTableTest.java | 48 +++-- .../data/mtmv_p0/test_iceberg_mtmv.out | 15 ++ .../suites/mtmv_p0/test_iceberg_mtmv.groovy | 56 ++++++ 13 files changed, 423 insertions(+), 107 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java create mode 100644 fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java index 96bf0097c28a51..cad6ca38130420 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java +++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/RangePartitionItem.java @@ -65,14 +65,9 @@ public boolean isDefaultPartition() { @Override public PartitionKeyDesc toPartitionKeyDesc() { - if (partitionKeyRange.hasLowerBound()) { - return PartitionKeyDesc.createFixed( + return PartitionKeyDesc.createFixed( PartitionInfo.toPartitionValue(partitionKeyRange.lowerEndpoint()), PartitionInfo.toPartitionValue(partitionKeyRange.upperEndpoint())); - } else { - // For null 
partition value. - return PartitionKeyDesc.createLessThan(PartitionInfo.toPartitionValue(partitionKeyRange.upperEndpoint())); - } } @Override diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java index da4670d6d0589d..a6fb486bed9c65 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalTable.java @@ -543,7 +543,7 @@ public Optional initSchema() { } private List getIcebergSchema() { - return IcebergUtils.getSchema(catalog, dbName, name); + return IcebergUtils.getSchema(catalog, dbName, name, IcebergUtils.UNKNOWN_SNAPSHOT_ID); } private List getHudiSchema() { diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java index e259399f63740b..7f7d2fdf578292 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergExternalTable.java @@ -27,9 +27,14 @@ import org.apache.doris.catalog.RangePartitionItem; import org.apache.doris.common.AnalysisException; import org.apache.doris.common.DdlException; +import org.apache.doris.datasource.CacheException; +import org.apache.doris.datasource.ExternalSchemaCache; +import org.apache.doris.datasource.ExternalSchemaCache.SchemaCacheKey; import org.apache.doris.datasource.ExternalTable; import org.apache.doris.datasource.SchemaCacheValue; import org.apache.doris.datasource.mvcc.MvccSnapshot; +import org.apache.doris.datasource.mvcc.MvccTable; +import org.apache.doris.datasource.mvcc.MvccUtil; import org.apache.doris.mtmv.MTMVBaseTableIf; import org.apache.doris.mtmv.MTMVRefreshContext; import org.apache.doris.mtmv.MTMVRelatedTableIf; @@ -77,7 +82,7 @@ import 
java.util.Set; import java.util.stream.Collectors; -public class IcebergExternalTable extends ExternalTable implements MTMVRelatedTableIf, MTMVBaseTableIf { +public class IcebergExternalTable extends ExternalTable implements MTMVRelatedTableIf, MTMVBaseTableIf, MvccTable { public static final String YEAR = "year"; public static final String MONTH = "month"; @@ -117,39 +122,23 @@ public void setPartitionColumns(List partitionColumns) { } @Override - public Optional initSchema() { - table = IcebergUtils.getIcebergTable(catalog, dbName, name); - List schema = IcebergUtils.getSchema(catalog, dbName, name); - Snapshot snapshot = table.currentSnapshot(); - if (snapshot == null) { - LOG.debug("Table {} is empty", name); - return Optional.of(new IcebergSchemaCacheValue(schema, null, -1, null)); - } - long snapshotId = snapshot.snapshotId(); - partitionColumns = null; - IcebergPartitionInfo partitionInfo = null; - if (isValidRelatedTable()) { - PartitionSpec spec = table.spec(); - partitionColumns = Lists.newArrayList(); - - // For iceberg table, we only support table with 1 partition column as RelatedTable. - // So we use spec.fields().get(0) to get the partition column. 
- Types.NestedField col = table.schema().findField(spec.fields().get(0).sourceId()); + public Optional initSchema(SchemaCacheKey key) { + table = getIcebergTable(); + List schema = IcebergUtils.getSchema(catalog, dbName, name, + ((IcebergSchemaCacheKey) key).getSchemaId()); + List tmpColumns = Lists.newArrayList(); + PartitionSpec spec = table.spec(); + for (PartitionField field : spec.fields()) { + Types.NestedField col = table.schema().findField(field.sourceId()); for (Column c : schema) { if (c.getName().equalsIgnoreCase(col.name())) { - partitionColumns.add(c); + tmpColumns.add(c); break; } } - Preconditions.checkState(partitionColumns.size() == 1, - "Support 1 partition column for iceberg table, but found " + partitionColumns.size()); - try { - partitionInfo = loadPartitionInfo(); - } catch (AnalysisException e) { - LOG.warn("Failed to load iceberg table {} partition info.", name, e); - } } - return Optional.of(new IcebergSchemaCacheValue(schema, partitionColumns, snapshotId, partitionInfo)); + partitionColumns = tmpColumns; + return Optional.of(new IcebergSchemaCacheValue(schema, partitionColumns)); } @Override @@ -187,6 +176,11 @@ public Table getIcebergTable() { return IcebergUtils.getIcebergTable(getCatalog(), getDbName(), getName()); } + private IcebergSnapshotCacheValue getIcebergSnapshotCacheValue() { + return Env.getCurrentEnv().getExtMetaCacheMgr().getIcebergMetadataCache() + .getSnapshotCache(catalog, dbName, name); + } + @Override public void beforeMTMVRefresh(MTMV mtmv) throws DdlException { Env.getCurrentEnv().getRefreshManager() @@ -195,46 +189,36 @@ public void beforeMTMVRefresh(MTMV mtmv) throws DdlException { @Override public Map getAndCopyPartitionItems(Optional snapshot) { - return Maps.newHashMap(getPartitionInfoFromCache().getNameToPartitionItem()); + return Maps.newHashMap(getOrFetchSnapshotCacheValue(snapshot).getPartitionInfo().getNameToPartitionItem()); } - private IcebergPartitionInfo getPartitionInfoFromCache() { - 
makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - if (!schemaCacheValue.isPresent()) { - return new IcebergPartitionInfo(); - } - return ((IcebergSchemaCacheValue) schemaCacheValue.get()).getPartitionInfo(); + @Override + public Map getNameToPartitionItems(Optional snapshot) { + return getOrFetchSnapshotCacheValue(snapshot).getPartitionInfo().getNameToPartitionItem(); } @Override public PartitionType getPartitionType(Optional snapshot) { - makeSureInitialized(); return isValidRelatedTable() ? PartitionType.RANGE : PartitionType.UNPARTITIONED; } @Override public Set getPartitionColumnNames(Optional snapshot) throws DdlException { - return getPartitionColumnsFromCache().stream().map(Column::getName).collect(Collectors.toSet()); + return getPartitionColumns(snapshot).stream().map(Column::getName).collect(Collectors.toSet()); } @Override public List getPartitionColumns(Optional snapshot) { - return getPartitionColumnsFromCache(); - } - - private List getPartitionColumnsFromCache() { - makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - return schemaCacheValue - .map(cacheValue -> ((IcebergSchemaCacheValue) cacheValue).getPartitionColumns()) - .orElseGet(Lists::newArrayList); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + IcebergSchemaCacheValue schemaValue = getIcebergSchemaCacheValue(snapshotValue.getSnapshot().getSchemaId()); + return schemaValue.getPartitionColumns(); } @Override public MTMVSnapshotIf getPartitionSnapshot(String partitionName, MTMVRefreshContext context, Optional snapshot) throws AnalysisException { - long latestSnapshotId = getPartitionInfoFromCache().getLatestSnapshotId(partitionName); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + long latestSnapshotId = snapshotValue.getPartitionInfo().getLatestSnapshotId(partitionName); if (latestSnapshotId <= 0) { throw new AnalysisException("can not find partition: " + 
partitionName); } @@ -244,16 +228,9 @@ public MTMVSnapshotIf getPartitionSnapshot(String partitionName, MTMVRefreshCont @Override public MTMVSnapshotIf getTableSnapshot(MTMVRefreshContext context, Optional snapshot) throws AnalysisException { - return new MTMVVersionSnapshot(getLatestSnapshotIdFromCache()); - } - - public long getLatestSnapshotIdFromCache() throws AnalysisException { makeSureInitialized(); - Optional schemaCacheValue = getSchemaCacheValue(); - if (!schemaCacheValue.isPresent()) { - throw new AnalysisException("Can't find schema cache of table " + name); - } - return ((IcebergSchemaCacheValue) schemaCacheValue.get()).getSnapshotId(); + IcebergSnapshotCacheValue snapshotValue = getOrFetchSnapshotCacheValue(snapshot); + return new MTMVVersionSnapshot(snapshotValue.getSnapshot().getSnapshotId()); } @Override @@ -268,11 +245,13 @@ public boolean isPartitionColumnAllowNull() { */ @Override public boolean isValidRelatedTable() { + makeSureInitialized(); if (isValidRelatedTableCached) { return isValidRelatedTable; } isValidRelatedTable = false; Set allFields = Sets.newHashSet(); + table = getIcebergTable(); for (PartitionSpec spec : table.specs().values()) { if (spec == null) { isValidRelatedTableCached = true; @@ -299,14 +278,62 @@ public boolean isValidRelatedTable() { return isValidRelatedTable; } - protected IcebergPartitionInfo loadPartitionInfo() throws AnalysisException { - List icebergPartitions = loadIcebergPartition(); + @Override + public MvccSnapshot loadSnapshot() { + return new IcebergMvccSnapshot(getIcebergSnapshotCacheValue()); + } + + public long getLatestSnapshotId() { + table = getIcebergTable(); + Snapshot snapshot = table.currentSnapshot(); + return snapshot == null ? IcebergUtils.UNKNOWN_SNAPSHOT_ID : table.currentSnapshot().snapshotId(); + } + + public long getSchemaId(long snapshotId) { + table = getIcebergTable(); + return snapshotId == IcebergUtils.UNKNOWN_SNAPSHOT_ID + ? 
IcebergUtils.UNKNOWN_SNAPSHOT_ID + : table.snapshot(snapshotId).schemaId(); + } + + @Override + public List getFullSchema() { + Optional snapshotFromContext = MvccUtil.getSnapshotFromContext(this); + IcebergSnapshotCacheValue cacheValue = getOrFetchSnapshotCacheValue(snapshotFromContext); + return getIcebergSchemaCacheValue(cacheValue.getSnapshot().getSchemaId()).getSchema(); + } + + @Override + public boolean supportInternalPartitionPruned() { + return true; + } + + public IcebergSchemaCacheValue getIcebergSchemaCacheValue(long schemaId) { + ExternalSchemaCache cache = Env.getCurrentEnv().getExtMetaCacheMgr().getSchemaCache(catalog); + Optional schemaCacheValue = cache.getSchemaValue( + new IcebergSchemaCacheKey(dbName, name, schemaId)); + if (!schemaCacheValue.isPresent()) { + throw new CacheException("failed to getSchema for: %s.%s.%s.%s", + null, catalog.getName(), dbName, name, schemaId); + } + return (IcebergSchemaCacheValue) schemaCacheValue.get(); + } + + public IcebergPartitionInfo loadPartitionInfo(long snapshotId) throws AnalysisException { + // snapshotId == UNKNOWN_SNAPSHOT_ID means this is an empty table, haven't contained any snapshot yet. 
+ if (!isValidRelatedTable() || snapshotId == IcebergUtils.UNKNOWN_SNAPSHOT_ID) { + return new IcebergPartitionInfo(); + } + List icebergPartitions = loadIcebergPartition(snapshotId); Map nameToPartition = Maps.newHashMap(); Map nameToPartitionItem = Maps.newHashMap(); + table = getIcebergTable(); + partitionColumns = getIcebergSchemaCacheValue(table.snapshot(snapshotId).schemaId()).getPartitionColumns(); for (IcebergPartition partition : icebergPartitions) { nameToPartition.put(partition.getPartitionName(), partition); String transform = table.specs().get(partition.getSpecId()).fields().get(0).transform().toString(); - Range partitionRange = getPartitionRange(partition.getPartitionValues().get(0), transform); + Range partitionRange = getPartitionRange( + partition.getPartitionValues().get(0), transform, partitionColumns); PartitionItem item = new RangePartitionItem(partitionRange); nameToPartitionItem.put(partition.getPartitionName(), item); } @@ -314,11 +341,11 @@ protected IcebergPartitionInfo loadPartitionInfo() throws AnalysisException { return new IcebergPartitionInfo(nameToPartitionItem, nameToPartition, partitionNameMap); } - public List loadIcebergPartition() { + public List loadIcebergPartition(long snapshotId) { PartitionsTable partitionsTable = (PartitionsTable) MetadataTableUtils .createMetadataTableInstance(table, MetadataTableType.PARTITIONS); List partitions = Lists.newArrayList(); - try (CloseableIterable tasks = partitionsTable.newScan().planFiles()) { + try (CloseableIterable tasks = partitionsTable.newScan().useSnapshot(snapshotId).planFiles()) { for (FileScanTask task : tasks) { CloseableIterable rows = task.asDataTask().rows(); for (StructLike row : rows) { @@ -344,6 +371,7 @@ public IcebergPartition generateIcebergPartition(StructLike row) { // 8. equality_delete_file_count, // 9. last_updated_at, // 10. 
last_updated_snapshot_id + table = getIcebergTable(); Preconditions.checkState(!table.spec().fields().isEmpty(), table.name() + " is not a partition table."); int specId = row.get(1, Integer.class); PartitionSpec partitionSpec = table.specs().get(specId); @@ -382,13 +410,14 @@ public IcebergPartition generateIcebergPartition(StructLike row) { } @VisibleForTesting - public Range getPartitionRange(String value, String transform) + public Range getPartitionRange(String value, String transform, List partitionColumns) throws AnalysisException { - // For NULL value, create a lessThan partition for it. + // For NULL value, create a minimum partition for it. if (value == null) { - PartitionKey nullKey = PartitionKey.createPartitionKey( - Lists.newArrayList(new PartitionValue("0000-01-02")), partitionColumns); - return Range.lessThan(nullKey); + PartitionKey nullLowKey = PartitionKey.createPartitionKey( + Lists.newArrayList(new PartitionValue("0000-01-01")), partitionColumns); + PartitionKey nullUpKey = nullLowKey.successor(); + return Range.closedOpen(nullLowKey, nullUpKey); } LocalDateTime epoch = Instant.EPOCH.atZone(ZoneId.of("UTC")).toLocalDateTime(); LocalDateTime target; @@ -525,4 +554,12 @@ public boolean validRelatedTableCache() { public void setIsValidRelatedTableCached(boolean isCached) { this.isValidRelatedTableCached = isCached; } + + private IcebergSnapshotCacheValue getOrFetchSnapshotCacheValue(Optional snapshot) { + if (snapshot.isPresent()) { + return ((IcebergMvccSnapshot) snapshot.get()).getSnapshotCacheValue(); + } else { + return getIcebergSnapshotCacheValue(); + } + } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java index ad347ca78f2a4f..e80a013cc92195 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java @@ -18,6 +18,7 @@ package org.apache.doris.datasource.iceberg; import org.apache.doris.catalog.Env; +import org.apache.doris.common.AnalysisException; import org.apache.doris.common.CacheFactory; import org.apache.doris.common.Config; import org.apache.doris.common.UserException; @@ -49,6 +50,7 @@ public class IcebergMetadataCache { private final LoadingCache> snapshotListCache; private final LoadingCache tableCache; + private final LoadingCache snapshotCache; public IcebergMetadataCache(ExecutorService executor) { CacheFactory snapshotListCacheFactory = new CacheFactory( @@ -66,6 +68,14 @@ public IcebergMetadataCache(ExecutorService executor) { true, null); this.tableCache = tableCacheFactory.buildCache(key -> loadTable(key), null, executor); + + CacheFactory snapshotCacheFactory = new CacheFactory( + OptionalLong.of(28800L), + OptionalLong.of(Config.external_cache_expire_time_minutes_after_access * 60), + Config.max_external_table_cache_num, + true, + null); + this.snapshotCache = snapshotCacheFactory.buildCache(key -> loadSnapshot(key), null, executor); } public List getSnapshotList(TIcebergMetadataParams params) throws UserException { @@ -92,6 +102,11 @@ public Table getAndCloneTable(CatalogIf catalog, String dbName, String tbName) { return restTable; } + public IcebergSnapshotCacheValue getSnapshotCache(CatalogIf catalog, String dbName, String tbName) { + IcebergMetadataCacheKey key = IcebergMetadataCacheKey.of(catalog, dbName, tbName); + return snapshotCache.get(key); + } + @NotNull private List loadSnapshots(IcebergMetadataCacheKey key) { Table icebergTable = getIcebergTable(key.catalog, key.dbName, key.tableName); @@ -114,6 +129,16 @@ private Table loadTable(IcebergMetadataCacheKey key) { () -> ops.loadTable(key.dbName, key.tableName)); } + @NotNull + private IcebergSnapshotCacheValue loadSnapshot(IcebergMetadataCacheKey key) throws AnalysisException { + 
IcebergExternalTable table = (IcebergExternalTable) key.catalog.getDbOrAnalysisException(key.dbName) + .getTableOrAnalysisException(key.tableName); + long snapshotId = table.getLatestSnapshotId(); + long schemaId = table.getSchemaId(snapshotId); + IcebergPartitionInfo icebergPartitionInfo = table.loadPartitionInfo(snapshotId); + return new IcebergSnapshotCacheValue(icebergPartitionInfo, new IcebergSnapshot(snapshotId, schemaId)); + } + public void invalidateCatalogCache(long catalogId) { snapshotListCache.asMap().keySet().stream() .filter(key -> key.catalog.getId() == catalogId) @@ -125,6 +150,10 @@ public void invalidateCatalogCache(long catalogId) { ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId) + .forEach(snapshotCache::invalidate); } public void invalidateTableCache(long catalogId, String dbName, String tblName) { @@ -143,6 +172,11 @@ public void invalidateTableCache(long catalogId, String dbName, String tblName) ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId && key.dbName.equals(dbName) && key.tableName.equals( + tblName)) + .forEach(snapshotCache::invalidate); } public void invalidateDbCache(long catalogId, String dbName) { @@ -159,6 +193,10 @@ public void invalidateDbCache(long catalogId, String dbName) { ManifestFiles.dropCache(entry.getValue().io()); tableCache.invalidate(entry.getKey()); }); + + snapshotCache.asMap().keySet().stream() + .filter(key -> key.catalog.getId() == catalogId && key.dbName.equals(dbName)) + .forEach(snapshotCache::invalidate); } private static void initIcebergTableFileIO(Table table, Map props) { @@ -212,10 +250,12 @@ public int hashCode() { public Map> getCacheStats() { Map> res = Maps.newHashMap(); - res.put("iceberg_snapshot_cache", 
ExternalMetaCacheMgr.getCacheStats(snapshotListCache.stats(), + res.put("iceberg_snapshot_list_cache", ExternalMetaCacheMgr.getCacheStats(snapshotListCache.stats(), snapshotListCache.estimatedSize())); res.put("iceberg_table_cache", ExternalMetaCacheMgr.getCacheStats(tableCache.stats(), tableCache.estimatedSize())); + res.put("iceberg_snapshot_cache", ExternalMetaCacheMgr.getCacheStats(snapshotCache.stats(), + snapshotCache.estimatedSize())); return res; } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java new file mode 100644 index 00000000000000..2c0155a71cd389 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMvccSnapshot.java @@ -0,0 +1,32 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +import org.apache.doris.datasource.mvcc.MvccSnapshot; + +public class IcebergMvccSnapshot implements MvccSnapshot { + private final IcebergSnapshotCacheValue snapshotCacheValue; + + public IcebergMvccSnapshot(IcebergSnapshotCacheValue snapshotCacheValue) { + this.snapshotCacheValue = snapshotCacheValue; + } + + public IcebergSnapshotCacheValue getSnapshotCacheValue() { + return snapshotCacheValue; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java new file mode 100644 index 00000000000000..7931d91831fcec --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheKey.java @@ -0,0 +1,55 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +import org.apache.doris.datasource.ExternalSchemaCache.SchemaCacheKey; + +import com.google.common.base.Objects; + +public class IcebergSchemaCacheKey extends SchemaCacheKey { + private final long schemaId; + + public IcebergSchemaCacheKey(String dbName, String tableName, long schemaId) { + super(dbName, tableName); + this.schemaId = schemaId; + } + + public long getSchemaId() { + return schemaId; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof IcebergSchemaCacheKey)) { + return false; + } + if (!super.equals(o)) { + return false; + } + IcebergSchemaCacheKey that = (IcebergSchemaCacheKey) o; + return schemaId == that.schemaId; + } + + @Override + public int hashCode() { + return Objects.hashCode(super.hashCode(), schemaId); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java index e1fde8049fe1ad..ccfcaab0c7261d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSchemaCacheValue.java @@ -25,26 +25,13 @@ public class IcebergSchemaCacheValue extends SchemaCacheValue { private final List partitionColumns; - private final IcebergPartitionInfo partitionInfo; - private final long snapshotId; - public IcebergSchemaCacheValue(List schema, List partitionColumns, - long snapshotId, IcebergPartitionInfo partitionInfo) { + public IcebergSchemaCacheValue(List schema, List partitionColumns) { super(schema); this.partitionColumns = partitionColumns; - this.snapshotId = snapshotId; - this.partitionInfo = partitionInfo; } public List getPartitionColumns() { return partitionColumns; } - - public IcebergPartitionInfo getPartitionInfo() { - return partitionInfo; - } - - public long getSnapshotId() { - 
return snapshotId; - } } diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java new file mode 100644 index 00000000000000..5903c362d7434e --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshot.java @@ -0,0 +1,36 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +public class IcebergSnapshot { + private final long snapshotId; + private final long schemaId; + + public IcebergSnapshot(long snapshotId, long schemaId) { + this.snapshotId = snapshotId; + this.schemaId = schemaId; + } + + public long getSnapshotId() { + return snapshotId; + } + + public long getSchemaId() { + return schemaId; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java new file mode 100644 index 00000000000000..95c9a6f26cc5c5 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergSnapshotCacheValue.java @@ -0,0 +1,37 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package org.apache.doris.datasource.iceberg; + +public class IcebergSnapshotCacheValue { + + private final IcebergPartitionInfo partitionInfo; + private final IcebergSnapshot snapshot; + + public IcebergSnapshotCacheValue(IcebergPartitionInfo partitionInfo, IcebergSnapshot snapshot) { + this.partitionInfo = partitionInfo; + this.snapshot = snapshot; + } + + public IcebergPartitionInfo getPartitionInfo() { + return partitionInfo; + } + + public IcebergSnapshot getSnapshot() { + return snapshot; + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java index ba6d628e492c20..a7507fe031ff68 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java +++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java @@ -52,6 +52,7 @@ import org.apache.doris.nereids.exceptions.NotSupportedException; import org.apache.doris.thrift.TExprOpcode; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.iceberg.CatalogProperties; import org.apache.iceberg.FileFormat; @@ -107,6 +108,8 @@ public Integer initialValue() { // nickname in spark public static final String SPARK_SQL_COMPRESSION_CODEC = "spark.sql.iceberg.compression-codec"; + public static final long UNKNOWN_SNAPSHOT_ID = -1; + public static Expression convertToIcebergExpr(Expr expr, Schema schema) { if (expr == null) { return null; @@ -573,10 +576,17 @@ private static org.apache.iceberg.Table getIcebergTableInternal(ExternalCatalog /** * Get iceberg schema from catalog and convert them to doris schema */ - public static List getSchema(ExternalCatalog catalog, String dbName, String name) { + public static List getSchema(ExternalCatalog catalog, String dbName, String name, long schemaId) { return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(), () -> { org.apache.iceberg.Table 
icebergTable = getIcebergTable(catalog, dbName, name); - Schema schema = icebergTable.schema(); + Schema schema; + if (schemaId == UNKNOWN_SNAPSHOT_ID || icebergTable.currentSnapshot() == null) { + schema = icebergTable.schema(); + } else { + schema = icebergTable.schemas().get((int) schemaId); + } + Preconditions.checkNotNull(schema, + "Schema for table " + catalog.getName() + "." + dbName + "." + name + " is null"); List columns = schema.columns(); List tmpSchema = Lists.newArrayListWithCapacity(columns.size()); for (Types.NestedField field : columns) { diff --git a/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java b/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java index 80d0a7c2429df3..3ba4804e52279c 100644 --- a/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java +++ b/fe/fe-core/src/test/java/org/apache/doris/datasource/iceberg/IcebergExternalTableTest.java @@ -28,17 +28,21 @@ import com.google.common.collect.Maps; import com.google.common.collect.Range; import mockit.Expectations; +import mockit.Mock; +import mockit.MockUp; import mockit.Mocked; import mockit.Verifications; import org.apache.iceberg.PartitionField; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; +import org.apache.iceberg.Table; import org.apache.iceberg.transforms.Days; import org.apache.iceberg.transforms.Hours; import org.apache.iceberg.transforms.Months; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -52,6 +56,16 @@ public void testIsSupportedPartitionTable(@Mocked org.apache.iceberg.Table icebe @Mocked Schema schema) { IcebergExternalTable table = new IcebergExternalTable(1, "1", "2", null); Map specs = Maps.newHashMap(); + new MockUp() { + @Mock + private void makeSureInitialized() { + } + + @Mock + public Table 
getIcebergTable() { + return icebergTable; + } + }; // Test null specs.put(0, null); new Expectations() {{ @@ -139,34 +153,35 @@ public void testGetPartitionRange() throws AnalysisException { table.setPartitionColumns(partitionColumns); // Test null partition value - Range nullRange = table.getPartitionRange(null, "hour"); - Assertions.assertFalse(nullRange.hasLowerBound()); - Assertions.assertEquals("0000-01-02 00:00:00", + Range nullRange = table.getPartitionRange(null, "hour", partitionColumns); + Assertions.assertEquals("0000-01-01 00:00:00", + nullRange.lowerEndpoint().getPartitionValuesAsStringList().get(0)); + Assertions.assertEquals("0000-01-01 00:00:01", nullRange.upperEndpoint().getPartitionValuesAsStringList().get(0)); // Test hour transform. - Range hour = table.getPartitionRange("100", "hour"); + Range hour = table.getPartitionRange("100", "hour", partitionColumns); PartitionKey lowKey = hour.lowerEndpoint(); PartitionKey upKey = hour.upperEndpoint(); Assertions.assertEquals("1970-01-05 04:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1970-01-05 05:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test day transform. - Range day = table.getPartitionRange("100", "day"); + Range day = table.getPartitionRange("100", "day", partitionColumns); lowKey = day.lowerEndpoint(); upKey = day.upperEndpoint(); Assertions.assertEquals("1970-04-11 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1970-04-12 00:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test month transform. 
- Range month = table.getPartitionRange("100", "month"); + Range month = table.getPartitionRange("100", "month", partitionColumns); lowKey = month.lowerEndpoint(); upKey = month.upperEndpoint(); Assertions.assertEquals("1978-05-01 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); Assertions.assertEquals("1978-06-01 00:00:00", upKey.getPartitionValuesAsStringList().get(0)); // Test year transform. - Range year = table.getPartitionRange("100", "year"); + Range year = table.getPartitionRange("100", "year", partitionColumns); lowKey = year.lowerEndpoint(); upKey = year.upperEndpoint(); Assertions.assertEquals("2070-01-01 00:00:00", lowKey.getPartitionValuesAsStringList().get(0)); @@ -174,7 +189,7 @@ public void testGetPartitionRange() throws AnalysisException { // Test unsupported transform Exception exception = Assertions.assertThrows(RuntimeException.class, () -> { - table.getPartitionRange("100", "bucket"); + table.getPartitionRange("100", "bucket", partitionColumns); }); Assertions.assertEquals("Unsupported transform bucket", exception.getMessage()); } @@ -183,15 +198,16 @@ public void testGetPartitionRange() throws AnalysisException { public void testSortRange() throws AnalysisException { IcebergExternalTable table = new IcebergExternalTable(1, "1", "2", null); Column c = new Column("c", PrimitiveType.DATETIMEV2); + ArrayList columns = Lists.newArrayList(c); table.setPartitionColumns(Lists.newArrayList(c)); - PartitionItem nullRange = new RangePartitionItem(table.getPartitionRange(null, "hour")); - PartitionItem year1970 = new RangePartitionItem(table.getPartitionRange("0", "year")); - PartitionItem year1971 = new RangePartitionItem(table.getPartitionRange("1", "year")); - PartitionItem month197002 = new RangePartitionItem(table.getPartitionRange("1", "month")); - PartitionItem month197103 = new RangePartitionItem(table.getPartitionRange("14", "month")); - PartitionItem month197204 = new RangePartitionItem(table.getPartitionRange("27", "month")); - 
PartitionItem day19700202 = new RangePartitionItem(table.getPartitionRange("32", "day")); - PartitionItem day19730101 = new RangePartitionItem(table.getPartitionRange("1096", "day")); + PartitionItem nullRange = new RangePartitionItem(table.getPartitionRange(null, "hour", columns)); + PartitionItem year1970 = new RangePartitionItem(table.getPartitionRange("0", "year", columns)); + PartitionItem year1971 = new RangePartitionItem(table.getPartitionRange("1", "year", columns)); + PartitionItem month197002 = new RangePartitionItem(table.getPartitionRange("1", "month", columns)); + PartitionItem month197103 = new RangePartitionItem(table.getPartitionRange("14", "month", columns)); + PartitionItem month197204 = new RangePartitionItem(table.getPartitionRange("27", "month", columns)); + PartitionItem day19700202 = new RangePartitionItem(table.getPartitionRange("32", "day", columns)); + PartitionItem day19730101 = new RangePartitionItem(table.getPartitionRange("1096", "day", columns)); Map map = Maps.newHashMap(); map.put("nullRange", nullRange); map.put("year1970", year1970); diff --git a/regression-test/data/mtmv_p0/test_iceberg_mtmv.out b/regression-test/data/mtmv_p0/test_iceberg_mtmv.out index c9d9799da81300..483ac0957e6f67 100644 --- a/regression-test/data/mtmv_p0/test_iceberg_mtmv.out +++ b/regression-test/data/mtmv_p0/test_iceberg_mtmv.out @@ -103,3 +103,18 @@ 2024-09-30 6 2024-10-28 7 +-- !refresh_one_partition -- +2024-01-01T00:00 4 + +-- !refresh_one_partition_rewrite -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + +-- !refresh_auto -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + +-- !refresh_all_partition_rewrite -- +2024-01-01T00:00 4 +2024-01-02T00:00 3 + diff --git a/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy b/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy index 59cf1173acb46b..aee80d8d1693a4 100644 --- a/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy +++ b/regression-test/suites/mtmv_p0/test_iceberg_mtmv.groovy @@ -83,6 +83,7 @@ 
suite("test_iceberg_mtmv", "p0,external,iceberg,external_docker,external_docker_ String icebergDb = "iceberg_mtmv_partition" String icebergTable1 = "tstable" String icebergTable2 = "dtable" + String icebergTable3 = "union_test" sql """drop catalog if exists ${catalog_name} """ sql """create catalog if not exists ${catalog_name} properties ( 'type'='iceberg', @@ -210,6 +211,61 @@ suite("test_iceberg_mtmv", "p0,external,iceberg,external_docker,external_docker_ sql """drop materialized view if exists ${mvName2};""" sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable2}""" + // Test rewrite and union partitions + sql """set materialized_view_rewrite_enable_contain_external_table=true;""" + String mvSql = "SELECT par,count(*) as num FROM ${catalog_name}.${icebergDb}.${icebergTable3} group by par" + String mvName = "union_mv" + sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable3}""" + sql """ + CREATE TABLE ${catalog_name}.${icebergDb}.${icebergTable3} ( + id int, + value int, + par datetime + ) ENGINE=iceberg + PARTITION BY LIST (day(par)) (); + """ + sql """insert into ${catalog_name}.${icebergDb}.${icebergTable3} values (1, 1, "2024-01-01"), (2, 1, "2024-01-01"), (3, 1, "2024-01-01"), (4, 1, "2024-01-01")""" + sql """insert into ${catalog_name}.${icebergDb}.${icebergTable3} values (1, 2, "2024-01-02"), (2, 2, "2024-01-02"), (3, 2, "2024-01-02")""" + sql """analyze table ${catalog_name}.${icebergDb}.${icebergTable3} with sync""" + + sql """drop materialized view if exists ${mvName};""" + sql """ + CREATE MATERIALIZED VIEW ${mvName} + BUILD DEFERRED REFRESH AUTO ON MANUAL + partition by(`par`) + DISTRIBUTED BY RANDOM BUCKETS 2 + PROPERTIES ('replication_num' = '1') + AS ${mvSql} + """ + + def showPartitions = sql """show partitions from ${mvName}""" + logger.info("showPartitions: " + showPartitions.toString()) + assertTrue(showPartitions.toString().contains("p_20240101000000_20240102000000")) + 
assertTrue(showPartitions.toString().contains("p_20240102000000_20240103000000")) + + // refresh one partiton + sql """REFRESH MATERIALIZED VIEW ${mvName} partitions(p_20240101000000_20240102000000);""" + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_one_partition "SELECT * FROM ${mvName} " + def explainOnePartition = sql """ explain ${mvSql} """ + logger.info("explainOnePartition: " + explainOnePartition.toString()) + assertTrue(explainOnePartition.toString().contains("VUNION")) + order_qt_refresh_one_partition_rewrite "${mvSql}" + mv_rewrite_success("${mvSql}", "${mvName}") + + //refresh auto + sql """REFRESH MATERIALIZED VIEW ${mvName} auto""" + waitingMTMVTaskFinishedByMvName(mvName) + order_qt_refresh_auto "SELECT * FROM ${mvName} " + def explainAllPartition = sql """ explain ${mvSql}; """ + logger.info("explainAllPartition: " + explainAllPartition.toString()) + assertTrue(explainAllPartition.toString().contains("VOlapScanNode")) + order_qt_refresh_all_partition_rewrite "${mvSql}" + mv_rewrite_success("${mvSql}", "${mvName}") + + sql """drop materialized view if exists ${mvName};""" + sql """drop table if exists ${catalog_name}.${icebergDb}.${icebergTable3}""" + sql """ drop catalog if exists ${catalog_name} """ } } From 067643df6b9ee30c509846687e200814b763dae9 Mon Sep 17 00:00:00 2001 From: seawinde Date: Tue, 24 Dec 2024 14:10:01 +0800 Subject: [PATCH 73/82] [fix](mtmv) Fix mv is deleted in nested mv causing query err and fix some test (#45744) ### What problem does this PR solve? 
Related PR: #45045 Problem Summary: if mv2 sql contains mv1 as following, if drop mv_level_1, query mv2 directly would cause err, this fix this CREATE MATERIALIZED VIEW mv2 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL DISTRIBUTED BY RANDOM BUCKETS 2 PROPERTIES ('replication_num' = '1') AS SELECT * FROM mv_level_1; --- .../rules/analysis/CollectRelation.java | 9 ++- .../mv/nested/nested_mv_delete.out | 11 +++ .../doris/regression/suite/Suite.groovy | 39 +++++++++- .../auth_p0/test_select_column_auth.groovy | 4 + .../suites/mv_p0/unique/unique_rewrite.groovy | 9 +++ .../range_date_part_up_rewrite.groovy | 14 ++-- .../is_in_debug_mode/is_in_debug_mode.groovy | 20 +++-- .../mv/nested/nested_mv_delete.groovy | 77 +++++++++++++++++++ 8 files changed, 169 insertions(+), 14 deletions(-) create mode 100644 regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out create mode 100644 regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java index 9c6e3adbe74e1b..01adc549e3686d 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/rules/analysis/CollectRelation.java @@ -29,6 +29,7 @@ import org.apache.doris.nereids.analyzer.UnboundRelation; import org.apache.doris.nereids.analyzer.UnboundResultSink; import org.apache.doris.nereids.analyzer.UnboundTableSink; +import org.apache.doris.nereids.exceptions.AnalysisException; import org.apache.doris.nereids.parser.NereidsParser; import org.apache.doris.nereids.pattern.MatchingContext; import org.apache.doris.nereids.properties.PhysicalProperties; @@ -197,7 +198,13 @@ private void collectMTMVCandidates(TableIf table, CascadesContext cascadesContex try { for (BaseTableInfo baseTableInfo : mtmv.getRelation().getBaseTables()) { 
LOG.info("mtmv {} related base table include {}", new BaseTableInfo(mtmv), baseTableInfo); - cascadesContext.getStatementContext().getAndCacheTable(baseTableInfo.toList(), TableFrom.MTMV); + try { + cascadesContext.getStatementContext().getAndCacheTable(baseTableInfo.toList(), + TableFrom.MTMV); + } catch (AnalysisException exception) { + LOG.warn("mtmv related base table get err, related table is " + + baseTableInfo.toList(), exception); + } } } finally { mtmv.readMvUnlock(); diff --git a/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out b/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out new file mode 100644 index 00000000000000..65b48e2b8c1fce --- /dev/null +++ b/regression-test/data/nereids_rules_p0/mv/nested/nested_mv_delete.out @@ -0,0 +1,11 @@ +-- This file is automatically generated. You should know what you did if you want to edit this +-- !query_after_delete -- +1 1 o 10.50 2023-12-08 a b 1 yy \N +1 1 o 9.50 2023-12-08 a b 1 yy 1 +2 1 o 11.50 2023-12-09 a b 1 yy 2 +3 1 o 12.50 2023-12-10 a b 1 yy \N +3 1 o 33.50 2023-12-10 a b 1 yy 3 +4 2 o 43.20 2023-12-11 c d 2 mm \N +5 2 o 1.20 2023-12-12 c d 2 mi \N +5 2 o 56.20 2023-12-12 c d 2 mi 4 + diff --git a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy index 16cc6bbd21b003..7ed3f1cb605d60 100644 --- a/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy +++ b/regression-test/framework/src/main/groovy/org/apache/doris/regression/suite/Suite.groovy @@ -1974,6 +1974,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success && result.contains("(${mv_name})") } + if (!success) { + logger.info("mv_rewrite_all_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -1984,7 +1987,11 @@ class Suite implements GroovyInterceptable { check { result -> boolean 
success = true; for (String mv_name : mv_names) { - Assert.assertEquals(true, result.contains("${mv_name} chose")) + def contains = result.contains("${mv_name} chose") + if (!contains) { + logger.info("mv_rewrite_all_success fail =" + result) + } + Assert.assertEquals(true, contains) } } } @@ -2011,6 +2018,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || result.contains("(${mv_name})") } + if (!success) { + logger.info("mv_rewrite_any_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2023,6 +2033,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || result.contains("${mv_name} chose") } + if (!success) { + logger.info("mv_rewrite_any_success fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2043,6 +2056,9 @@ class Suite implements GroovyInterceptable { def each_result = splitResult.length == 2 ? splitResult[0].contains(mv_name) : false success = success && (result.contains("(${mv_name})") || each_result) } + if (!success) { + logger.info("mv_rewrite_all_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2056,6 +2072,9 @@ class Suite implements GroovyInterceptable { boolean stepSuccess = result.contains("${mv_name} chose") || result.contains("${mv_name} not chose") success = success && stepSuccess } + if (!success) { + logger.info("mv_rewrite_all_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2076,6 +2095,9 @@ class Suite implements GroovyInterceptable { def each_result = splitResult.length == 2 ? 
splitResult[0].contains(mv_name) : false success = success || (result.contains("(${mv_name})") || each_result) } + if (!success) { + logger.info("mv_rewrite_any_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2088,6 +2110,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { success = success || result.contains("${mv_name} chose") || result.contains("${mv_name} not chose") } + if (!success) { + logger.info("mv_rewrite_any_success_without_check_chosen fail =" + result) + } Assert.assertEquals(true, success) } } @@ -2146,6 +2171,9 @@ class Suite implements GroovyInterceptable { boolean stepFail = !result.contains("(${mv_name})") fail = fail && stepFail } + if (!fail) { + logger.info("mv_rewrite_all_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2159,6 +2187,9 @@ class Suite implements GroovyInterceptable { boolean stepFail = result.contains("${mv_name} fail") fail = fail && stepFail } + if (!fail) { + logger.info("mv_rewrite_all_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2176,6 +2207,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { fail = fail || !result.contains("(${mv_name})") } + if (!fail) { + logger.info("mv_rewrite_any_fail =" + result) + } Assert.assertEquals(true, fail) } } @@ -2188,6 +2222,9 @@ class Suite implements GroovyInterceptable { for (String mv_name : mv_names) { fail = fail || result.contains("${mv_name} fail") } + if (!fail) { + logger.info("mv_rewrite_any_fail =" + result) + } Assert.assertEquals(true, fail) } } diff --git a/regression-test/suites/auth_p0/test_select_column_auth.groovy b/regression-test/suites/auth_p0/test_select_column_auth.groovy index 52f1dc02697dd4..36cc2a0a09cf1c 100644 --- a/regression-test/suites/auth_p0/test_select_column_auth.groovy +++ b/regression-test/suites/auth_p0/test_select_column_auth.groovy @@ -130,6 +130,10 @@ suite("test_select_column_auth","p0,auth") { sql """grant 
select_priv(sum_id) on ${dbName}.${mtmv_name} to ${user}""" sql """grant select_priv(id) on ${dbName}.${tableName} to ${user}""" connect(user, "${pwd}", context.config.jdbcUrl) { + def show_grants = sql """show grants;""" + logger.info("show grants:" + show_grants.toString()) + // If exec on fe follower, wait meta data is ready on follower + Thread.sleep(2000) sql "SET enable_materialized_view_rewrite=true" explain { sql("""select username, sum(id) from ${dbName}.${tableName} group by username""") diff --git a/regression-test/suites/mv_p0/unique/unique_rewrite.groovy b/regression-test/suites/mv_p0/unique/unique_rewrite.groovy index e8c3dd05f80c92..1e8a37c70919ba 100644 --- a/regression-test/suites/mv_p0/unique/unique_rewrite.groovy +++ b/regression-test/suites/mv_p0/unique/unique_rewrite.groovy @@ -96,6 +96,10 @@ suite("mv_on_unique_table") { AS ${mv1} """) + + def desc_all_mv1 = sql """desc lineitem_2_uniq all;""" + logger.info("desc mv1 is: " + desc_all_mv1.toString()) + explain { sql("""${query1}""") check {result -> @@ -124,6 +128,11 @@ suite("mv_on_unique_table") { AS ${mv2} """) + + def desc_all_mv2 = sql """desc lineitem_2_uniq all;""" + logger.info("desc mv2 is" + desc_all_mv2) + // If exec on fe follower, wait meta data is ready on follower + Thread.sleep(2000) explain { sql("""${query2}""") check {result -> diff --git a/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy b/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy index 88d8ad6ea2d84d..35d4a60e6255bb 100644 --- a/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/create_part_and_up/range_date_part_up_rewrite.groovy @@ -169,7 +169,7 @@ suite("mtmv_range_date_part_up_rewrite") { for (int i = 0; i < mv_name_list.size(); i++) { def job_name = getJobName(db, mv_name_list[i]) waitingMTMVTaskFinished(job_name) - 
mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } @@ -178,38 +178,38 @@ suite("mtmv_range_date_part_up_rewrite") { sql """insert into lineitem_range_date_union values (1, null, 3, 1, 5.5, 6.5, 7.5, 8.5, 'o', 'k', '2023-10-18', '2023-10-18', 'a', 'b', 'yyyyyyyyy', '2023-11-01')""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } sql """insert into lineitem_range_date_union values (2, null, 3, 1, 5.5, 6.5, 7.5, 8.5, 'o', 'k', '2023-10-18', '2023-10-18', 'a', 'b', 'yyyyyyyyy', '2023-11-01');""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } sql """ALTER TABLE lineitem_range_date_union DROP PARTITION IF EXISTS p4 FORCE""" for (int i = 0; i < mv_name_list.size(); i++) { - mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } for (int i = 0; i < mv_name_list.size(); i++) { sql """refresh MATERIALIZED VIEW ${mv_name_list[i]} auto;""" - 
mv_rewrite_success(query_stmt_list[i], mv_name_list[i]) + mv_rewrite_any_success(query_stmt_list[i], mv_name_list) compare_res(query_stmt_list[i] + " order by 1,2,3") } diff --git a/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy b/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy index 15d93e32f65dc2..f973d031adeee0 100644 --- a/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy +++ b/regression-test/suites/nereids_rules_p0/mv/is_in_debug_mode/is_in_debug_mode.groovy @@ -83,7 +83,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv1" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_sign = false;""" @@ -99,7 +101,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv2" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_storage_engine_merge = false;""" @@ -115,7 +119,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv3: " + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_bitmap = false;""" @@ -131,7 +137,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv4" + message) + 
Assert.assertTrue(message.contains("because is in debug mode")) } sql """set skip_delete_predicate = false;""" @@ -147,7 +155,9 @@ suite("is_in_debug_mode") { AS select * from orders where o_orderkey > 2; """ } catch (Exception e) { - Assert.assertTrue(e.getMessage().contains("because is in debug mode")) + def message = e.getMessage() + logger.info("test_create_mv5" + message) + Assert.assertTrue(message.contains("because is in debug mode")) } sql """set show_hidden_columns = false;""" diff --git a/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy b/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy new file mode 100644 index 00000000000000..e7556094e71c10 --- /dev/null +++ b/regression-test/suites/nereids_rules_p0/mv/nested/nested_mv_delete.groovy @@ -0,0 +1,77 @@ +package mv.nested +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("nested_mv_delete") { + + String db = context.config.getDbNameByFile(context.file) + sql "use ${db}" + sql "set runtime_filter_mode=OFF"; + sql "SET ignore_shape_nodes='PhysicalDistribute,PhysicalProject'" + + sql """ + drop table if exists orders_1 + """ + + sql """ + CREATE TABLE IF NOT EXISTS orders_1 ( + o_orderkey INTEGER NOT NULL, + o_custkey INTEGER NOT NULL, + o_orderstatus CHAR(1) NOT NULL, + o_totalprice DECIMALV3(15,2) NOT NULL, + o_orderdate DATE NOT NULL, + o_orderpriority CHAR(15) NOT NULL, + o_clerk CHAR(15) NOT NULL, + o_shippriority INTEGER NOT NULL, + o_comment VARCHAR(79) NOT NULL, + public_col INT NULL + ) + DUPLICATE KEY(o_orderkey, o_custkey) + DISTRIBUTED BY HASH(o_orderkey) BUCKETS 3 + PROPERTIES ( + "replication_num" = "1" + ); + """ + + sql """ + insert into orders_1 values + (1, 1, 'o', 9.5, '2023-12-08', 'a', 'b', 1, 'yy', 1), + (1, 1, 'o', 10.5, '2023-12-08', 'a', 'b', 1, 'yy', null), + (2, 1, 'o', 11.5, '2023-12-09', 'a', 'b', 1, 'yy', 2), + (3, 1, 'o', 12.5, '2023-12-10', 'a', 'b', 1, 'yy', null), + (3, 1, 'o', 33.5, '2023-12-10', 'a', 'b', 1, 'yy', 3), + (4, 2, 'o', 43.2, '2023-12-11', 'c','d',2, 'mm', null), + (5, 2, 'o', 56.2, '2023-12-12', 'c','d',2, 'mi', 4), + (5, 2, 'o', 1.2, '2023-12-12', 'c','d',2, 'mi', null); + """ + + sql """alter table orders_1 modify column o_comment set stats ('row_count'='8');""" + + + create_async_mv(db, "mv_level_1", """ + select * from orders_1; + """) + + create_async_mv(db, "mv_level_2", """ + select * from mv_level_1; + """) + + sql """drop materialized view mv_level_1;""" + + order_qt_query_after_delete "select * from mv_level_2" + sql """ DROP MATERIALIZED VIEW IF EXISTS mv_level_2""" +} From 5018312dea36338250ffefd6b9d384b95689bb64 Mon Sep 17 00:00:00 2001 From: Mryange Date: Tue, 24 Dec 2024 14:24:55 +0800 Subject: [PATCH 74/82] [fix](local exchange) Use tokens to ensure that try_dequeue maintains strict order. (#45741) ### What problem does this PR solve? 
The previously used moodycamel::ConcurrentQueue does not guarantee that the enqueue order matches the dequeue order, even when there is only a single producer and a single consumer. Refer to this issue: https://github.com/cameron314/concurrentqueue/issues/316 We can use tokens to ensure the correct order. ### Release note None ### Check List (For Author) - Test - [ ] Regression test - [ ] Unit Test - [ ] Manual test (add detailed scripts or steps below) - [x] No need to test or manual test. Explain why: - [ ] This is a refactor/code format and no logic has been changed. - [x] Previous test can cover this change. - [ ] No code files have been changed. - [ ] Other reason - Behavior changed: - [x] No. - [ ] Yes. - Does this need documentation? - [x] No. - [ ] Yes. ### Check List (For Reviewer who merge this PR) - [ ] Confirm the release note - [ ] Confirm test cases - [ ] Confirm document - [ ] Add branch pick label --- .../pipeline/local_exchange/local_exchanger.h | 5 +- be/test/vec/exec/concurrent_queue_order.cpp | 109 ++++++++++++++++++ 2 files changed, 112 insertions(+), 2 deletions(-) create mode 100644 be/test/vec/exec/concurrent_queue_order.cpp diff --git a/be/src/pipeline/local_exchange/local_exchanger.h b/be/src/pipeline/local_exchange/local_exchanger.h index d6871b2ba97cc3..2ab1c8627228a4 100644 --- a/be/src/pipeline/local_exchange/local_exchanger.h +++ b/be/src/pipeline/local_exchange/local_exchanger.h @@ -124,12 +124,13 @@ template struct BlockQueue { std::atomic eos = false; moodycamel::ConcurrentQueue data_queue; + moodycamel::ProducerToken ptok {data_queue}; BlockQueue() : eos(false), data_queue(moodycamel::ConcurrentQueue()) {} BlockQueue(BlockQueue&& other) : eos(other.eos.load()), data_queue(std::move(other.data_queue)) {} inline bool enqueue(BlockType const& item) { if (!eos) { - if (!data_queue.enqueue(item)) [[unlikely]] { + if (!data_queue.enqueue(ptok, item)) [[unlikely]] { throw Exception(ErrorCode::INTERNAL_ERROR, "Exception occurs in data 
queue [size = {}] of local exchange.", data_queue.size_approx()); @@ -141,7 +142,7 @@ struct BlockQueue { inline bool enqueue(BlockType&& item) { if (!eos) { - if (!data_queue.enqueue(std::move(item))) [[unlikely]] { + if (!data_queue.enqueue(ptok, std::move(item))) [[unlikely]] { throw Exception(ErrorCode::INTERNAL_ERROR, "Exception occurs in data queue [size = {}] of local exchange.", data_queue.size_approx()); diff --git a/be/test/vec/exec/concurrent_queue_order.cpp b/be/test/vec/exec/concurrent_queue_order.cpp new file mode 100644 index 00000000000000..bc3e3c7ee6a6a7 --- /dev/null +++ b/be/test/vec/exec/concurrent_queue_order.cpp @@ -0,0 +1,109 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include + +#include +#include + +namespace doris::vectorized { + +class ConcurrentQueueOrder : public testing::Test { +public: + ConcurrentQueueOrder() = default; + ~ConcurrentQueueOrder() override = default; +}; +// The previously used moodycamel::ConcurrentQueue does not guarantee that the enqueue order matches the dequeue order, +// even when there is only a single producer and a single consumer. 
+// Refer to this issue: https://github.com/cameron314/concurrentqueue/issues/316 +// We can use tokens to ensure the correct order. +TEST_F(ConcurrentQueueOrder, test_not_guarantee_order) { + { + moodycamel::ConcurrentQueue data_queue; + int num = 0; + std::mutex m; + std::atomic_bool flag = true; + + auto task = [&](int thread_id) { + while (flag) { + std::lock_guard lc {m}; + data_queue.enqueue(num++); + } + }; + std::thread input1(task, 0); + std::thread input2(task, 1); + std::thread input3(task, 2); + + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + flag = false; + + input3.join(); + input1.join(); + input2.join(); + + std::cout << "queue size " << data_queue.size_approx() << "\n"; + std::vector outputs; + int output; + while (data_queue.try_dequeue(output)) { + outputs.push_back(output); + } + + EXPECT_FALSE(std::is_sorted(outputs.begin(), outputs.end())); + std::cout << "output is sorted : " << std::is_sorted(outputs.begin(), outputs.end()) + << "\n"; + } +} + +TEST_F(ConcurrentQueueOrder, test_guarantee_order) { + { + moodycamel::ConcurrentQueue data_queue; + moodycamel::ProducerToken ptok {data_queue}; + int num = 0; + std::mutex m; + std::atomic_bool flag = true; + + auto task = [&](int thread_id) { + while (flag) { + std::lock_guard lc {m}; + data_queue.enqueue(ptok, num++); + } + }; + std::thread input1(task, 0); + std::thread input2(task, 1); + std::thread input3(task, 2); + + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + flag = false; + + input3.join(); + input1.join(); + input2.join(); + + std::cout << "queue size " << data_queue.size_approx() << "\n"; + std::vector outputs; + int output; + while (data_queue.try_dequeue(output)) { + outputs.push_back(output); + } + + EXPECT_TRUE(std::is_sorted(outputs.begin(), outputs.end())); + std::cout << "output is sorted : " << std::is_sorted(outputs.begin(), outputs.end()) + << "\n"; + } +} +} // namespace doris::vectorized From ff690e446bb9bfa9a7387e0ceec3bc55143499ba Mon Sep 17 
00:00:00 2001 From: shuke Date: Tue, 24 Dec 2024 15:08:35 +0800 Subject: [PATCH 75/82] [regression-test](fix) forbid enable_table_size_correctness_check tmp (#45851) ### What problem does this PR solve? Issue Number: close #xxx Related PR: #xxx Problem Summary: forbid enable_table_size_correctness_check tmp, which cause regression pipeline core. --- regression-test/pipeline/p0/conf/be.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regression-test/pipeline/p0/conf/be.conf b/regression-test/pipeline/p0/conf/be.conf index 760b8762430bd7..0b73375b3fbdd3 100644 --- a/regression-test/pipeline/p0/conf/be.conf +++ b/regression-test/pipeline/p0/conf/be.conf @@ -71,7 +71,7 @@ be_proc_monitor_interval_ms = 30000 webserver_num_workers = 128 pipeline_task_leakage_detect_period_sec=1 crash_in_memory_tracker_inaccurate = true -enable_table_size_correctness_check=true +#enable_table_size_correctness_check=true enable_brpc_connection_check=true # enable download small files in batch, see apache/doris#45061 for details From e3b18b588ba0f737cf3a1c802e7cc444ec9d71e8 Mon Sep 17 00:00:00 2001 From: abmdocrt Date: Tue, 24 Dec 2024 15:16:03 +0800 Subject: [PATCH 76/82] [fix](recycler) Fix CountdownEvent error and hang (#45760) Fix CountdownEvent error "Invoking add_count() after wait() was invoked" --------- Co-authored-by: Gavin Chou --- cloud/src/common/simple_thread_pool.h | 5 +- cloud/src/recycler/recycler.cpp | 1 - cloud/src/recycler/sync_executor.h | 26 +++++++- cloud/test/util_test.cpp | 90 +++++++++++++++++++++++++++ 4 files changed, 118 insertions(+), 4 deletions(-) diff --git a/cloud/src/common/simple_thread_pool.h b/cloud/src/common/simple_thread_pool.h index e18d6787bf7a46..37a4cedbdadd73 100644 --- a/cloud/src/common/simple_thread_pool.h +++ b/cloud/src/common/simple_thread_pool.h @@ -19,6 +19,7 @@ #include #include +#include #include #include #include @@ -154,8 +155,10 @@ class SimpleThreadPool { } try { job(); + } catch (const std::exception& e) { + 
std::cerr << "exception happened when execute job. err: " << e.what() << std::endl; } catch (...) { - // do nothing + std::cerr << "exception happened when execute job." << std::endl; } } } diff --git a/cloud/src/recycler/recycler.cpp b/cloud/src/recycler/recycler.cpp index 6877d7e433b253..ca22b28e031c91 100644 --- a/cloud/src/recycler/recycler.cpp +++ b/cloud/src/recycler/recycler.cpp @@ -1278,7 +1278,6 @@ int InstanceRecycler::recycle_tablets(int64_t table_id, int64_t index_id, int64_ LOG_WARNING("failed to recycle tablet").tag("instance_id", instance_id_); return -1; } - sync_executor.reset(); if (tablet_keys.empty() && tablet_idx_keys.empty()) return 0; // sort the vector using key's order std::sort(tablet_keys.begin(), tablet_keys.end(), diff --git a/cloud/src/recycler/sync_executor.h b/cloud/src/recycler/sync_executor.h index c84e5e22467a9c..909f36a56c4c9a 100644 --- a/cloud/src/recycler/sync_executor.h +++ b/cloud/src/recycler/sync_executor.h @@ -18,10 +18,12 @@ #pragma once #include +#include #include #include #include +#include #include #include @@ -48,10 +50,12 @@ class SyncExecutor { return *this; } std::vector when_all(bool* finished) { + std::unique_ptr> defer((int*)0x01, [&](int*) { _reset(); }); timespec current_time; auto current_time_second = time(nullptr); current_time.tv_sec = current_time_second + 300; current_time.tv_nsec = 0; + // Wait for all tasks to complete while (0 != _count.timed_wait(current_time)) { current_time.tv_sec += 300; LOG(WARNING) << _name_tag << " has already taken 5 min, cost: " @@ -65,11 +69,26 @@ class SyncExecutor { *finished = false; return res; } - res.emplace_back((*task).get()); + size_t max_wait_ms = 10000; + TEST_SYNC_POINT_CALLBACK("SyncExecutor::when_all.set_wait_time", &max_wait_ms); + // _count.timed_wait has already ensured that all tasks are completed. + // The 10 seconds here is just waiting for the task results to be returned, + // so 10 seconds is more than enough. 
+ auto status = task->wait_for(max_wait_ms); + if (status == std::future_status::ready) { + res.emplace_back(task->get()); + } else { + *finished = false; + LOG(WARNING) << _name_tag << " task timed out after 10 seconds"; + return res; + } } return res; } - void reset() { + +private: + void _reset() { + _count.reset(0); _res.clear(); _stop_token = false; } @@ -98,6 +117,9 @@ class SyncExecutor { } _pro.set_value(std::move(t)); } + std::future_status wait_for(size_t milliseconds) { + return _fut.wait_for(std::chrono::milliseconds(milliseconds)); + } bool valid() { return _valid; } T get() { return _fut.get(); } diff --git a/cloud/test/util_test.cpp b/cloud/test/util_test.cpp index c88ef555f82806..e505b2b99a52da 100644 --- a/cloud/test/util_test.cpp +++ b/cloud/test/util_test.cpp @@ -18,6 +18,7 @@ #include "recycler/util.h" #include +#include #include #include #include @@ -28,6 +29,7 @@ #include "common/logging.h" #include "common/simple_thread_pool.h" #include "common/string_util.h" +#include "cpp/sync_point.h" #include "gtest/gtest.h" #include "recycler/recycler.h" #include "recycler/sync_executor.h" @@ -235,3 +237,91 @@ TEST(UtilTest, normal) { std::for_each(res.begin(), res.end(), [&s](auto&& n) { ASSERT_EQ(s, n); }); } } + +TEST(UtilTest, test_add_after_when_all) { + auto f = []() { + auto pool = std::make_shared(config::recycle_pool_parallelism); + pool->start(); + SyncExecutor sync_executor(pool, "test add after when all: inside", + [](int k) { return k != 0; }); + auto f1 = []() { return 0; }; + sync_executor.add(f1); + bool finished = true; + std::vector res = sync_executor.when_all(&finished); + sync_executor.add(f1); + res = sync_executor.when_all(&finished); + EXPECT_EQ(1, res.size()); + EXPECT_EQ(finished, true); + std::for_each(res.begin(), res.end(), [](auto&& n) { EXPECT_EQ(0, n); }); + return 0; + }; + + auto s3_producer_pool = std::make_shared(config::recycle_pool_parallelism); + s3_producer_pool->start(); + SyncExecutor 
s3_sync_executor(s3_producer_pool, "test add after when all: outside", + [](int k) { return k != 0; }); + s3_sync_executor.add(f); + bool finished = true; + std::vector res = s3_sync_executor.when_all(&finished); + EXPECT_EQ(1, res.size()); + EXPECT_EQ(finished, true); + std::for_each(res.begin(), res.end(), [](auto&& n) { EXPECT_EQ(0, n); }); +} + +TEST(UtilTest, exception) { + auto s3_producer_pool = std::make_shared(config::recycle_pool_parallelism); + s3_producer_pool->start(); + { + SyncExecutor sync_executor(s3_producer_pool, "exception test", + [](int k) { return k != 0; }); + auto f = []() { + throw(std::runtime_error("test exception")); + return 1; + }; + sync_executor.add(f); + bool finished = true; + std::vector res = sync_executor.when_all(&finished); + EXPECT_EQ(0, res.size()); + EXPECT_EQ(finished, false); + std::for_each(res.begin(), res.end(), [](auto&& n) { EXPECT_EQ(1, n); }); + } +} + +TEST(UtilTest, test_sync_executor) { + auto f = []() { + sleep(1); + auto pool = std::make_shared(config::recycle_pool_parallelism); + pool->start(); + SyncExecutor sync_executor(pool, "test sync executor: inside", + [](int k) { return k != 0; }); + auto f1 = []() { return 0; }; + sync_executor.add(f1); + bool finished = true; + std::vector res = sync_executor.when_all(&finished); + sync_executor.add(f1); + res = sync_executor.when_all(&finished); + EXPECT_EQ(1, res.size()); + EXPECT_EQ(finished, true); + std::for_each(res.begin(), res.end(), [](auto&& n) { EXPECT_EQ(0, n); }); + return 0; + }; + std::mutex go_mutex; + + auto* sp = doris::SyncPoint::get_instance(); + sp->set_call_back("SyncExecutor::when_all.set_wait_time", [&](auto&& args) { + std::unique_lock _lock(go_mutex); + auto max_wait_time = *doris::try_any_cast(args[0]); + max_wait_time = 100; + }); + + auto s3_producer_pool = std::make_shared(config::recycle_pool_parallelism); + s3_producer_pool->start(); + SyncExecutor s3_sync_executor(s3_producer_pool, "test sync executor: outside", + [](int k) { 
return k != 0; }); + s3_sync_executor.add(f); + bool finished = true; + std::vector res = s3_sync_executor.when_all(&finished); + EXPECT_EQ(1, res.size()); + EXPECT_EQ(finished, true); + std::for_each(res.begin(), res.end(), [](auto&& n) { EXPECT_EQ(0, n); }); +} \ No newline at end of file From cd54ead276c346e6345138428a306c363728e8a0 Mon Sep 17 00:00:00 2001 From: shuke Date: Tue, 24 Dec 2024 15:20:00 +0800 Subject: [PATCH 77/82] [regression-test](fix) fix regression-test/suites/node_p0/test_frontend.groovy nonConcurrent typo bug (#45852) --- regression-test/suites/node_p0/test_frontend.groovy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/regression-test/suites/node_p0/test_frontend.groovy b/regression-test/suites/node_p0/test_frontend.groovy index 2ccc432460bd9c..99d6068f7ba945 100644 --- a/regression-test/suites/node_p0/test_frontend.groovy +++ b/regression-test/suites/node_p0/test_frontend.groovy @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -suite("test_frontend", "nonconcurrent") { +suite("test_frontend", "nonConcurrent") { def address = "127.0.0.1" def notExistPort = 12345 From 8e4219ce8edf5dade7bd6a293e8817d6ecacd026 Mon Sep 17 00:00:00 2001 From: abmdocrt Date: Tue, 24 Dec 2024 15:22:56 +0800 Subject: [PATCH 78/82] [Enhancement](config) Modify cloud default stale rowset recycle time (#45460) Modify cloud default stale rowset recycle time from 3h to 0.5h, aligned with the computing-storage integration version. 
--- cloud/src/common/config.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cloud/src/common/config.h b/cloud/src/common/config.h index 5befb0f8b2105e..03e2b63c72a7f6 100644 --- a/cloud/src/common/config.h +++ b/cloud/src/common/config.h @@ -59,7 +59,7 @@ CONF_mInt64(recycle_interval_seconds, "3600"); CONF_mInt64(retention_seconds, "259200"); // 72h, global retention time CONF_Int32(recycle_concurrency, "16"); CONF_Int32(recycle_job_lease_expired_ms, "60000"); -CONF_mInt64(compacted_rowset_retention_seconds, "10800"); // 3h +CONF_mInt64(compacted_rowset_retention_seconds, "1800"); // 0.5h CONF_mInt64(dropped_index_retention_seconds, "10800"); // 3h CONF_mInt64(dropped_partition_retention_seconds, "10800"); // 3h // Which instance should be recycled. If empty, recycle all instances. From 1392084da8d75a42dbaf4d7fb051a5d6ba361257 Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Tue, 24 Dec 2024 13:06:27 +0530 Subject: [PATCH 79/82] [Enhancement] (nereids)implement helpCommand in nereids (#44819) Issue Number: close #42824 --- .../org/apache/doris/nereids/DorisParser.g4 | 8 +- .../nereids/parser/LogicalPlanBuilder.java | 8 ++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../trees/plans/commands/HelpCommand.java | 132 ++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 + .../nereids_p0/show/test_help_command.groovy | 34 +++++ 6 files changed, 186 insertions(+), 2 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/HelpCommand.java create mode 100644 regression-test/suites/nereids_p0/show/test_help_command.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 37e1c68cefb91c..eba16aa6127388 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -63,6 +63,7 @@ 
statementBase | supportedRecoverStatement #supportedRecoverStatementAlias | supportedAdminStatement #supportedAdminStatementAlias | supportedUseStatement #supportedUseStatementAlias + | supportedOtherStatement #supportedOtherStatementAlias | unsupportedStatement #unsupported ; @@ -290,9 +291,12 @@ supportedLoadStatement | createRoutineLoad #createRoutineLoadAlias ; -unsupportedOtherStatement +supportedOtherStatement : HELP mark=identifierOrText #help - | INSTALL PLUGIN FROM source=identifierOrText properties=propertyClause? #installPlugin + ; + +unsupportedOtherStatement + : INSTALL PLUGIN FROM source=identifierOrText properties=propertyClause? #installPlugin | UNINSTALL PLUGIN name=identifierOrText #uninstallPlugin | LOCK TABLES (lockTable (COMMA lockTable)*)? #lockTables | UNLOCK TABLES #unlockTables diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index d98d0660f5c9cb..54717302493128 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -144,6 +144,7 @@ import org.apache.doris.nereids.DorisParser.GroupingElementContext; import org.apache.doris.nereids.DorisParser.GroupingSetContext; import org.apache.doris.nereids.DorisParser.HavingClauseContext; +import org.apache.doris.nereids.DorisParser.HelpContext; import org.apache.doris.nereids.DorisParser.HintAssignmentContext; import org.apache.doris.nereids.DorisParser.HintStatementContext; import org.apache.doris.nereids.DorisParser.IdentifierContext; @@ -551,6 +552,7 @@ import org.apache.doris.nereids.trees.plans.commands.ExplainCommand; import org.apache.doris.nereids.trees.plans.commands.ExplainCommand.ExplainLevel; import org.apache.doris.nereids.trees.plans.commands.ExportCommand; +import org.apache.doris.nereids.trees.plans.commands.HelpCommand; import 
org.apache.doris.nereids.trees.plans.commands.LoadCommand; import org.apache.doris.nereids.trees.plans.commands.PauseJobCommand; import org.apache.doris.nereids.trees.plans.commands.PauseMTMVCommand; @@ -5087,6 +5089,12 @@ public LogicalPlan visitShowProcessList(ShowProcessListContext ctx) { return new ShowProcessListCommand(ctx.FULL() != null); } + @Override + public LogicalPlan visitHelp(HelpContext ctx) { + String mark = ctx.mark.getText(); + return new HelpCommand(mark); + } + @Override public LogicalPlan visitSync(SyncContext ctx) { return new SyncCommand(); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 2860ec10092312..06d2287de537bf 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -268,5 +268,6 @@ public enum PlanType { SHOW_TABLE_CREATION_COMMAND, SHOW_QUERY_PROFILE_COMMAND, SWITCH_COMMAND, + HELP_COMMAND, USE_COMMAND } diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/HelpCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/HelpCommand.java new file mode 100644 index 00000000000000..49acc914eae50a --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/HelpCommand.java @@ -0,0 +1,132 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.common.AnalysisException; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ShowResultSet; +import org.apache.doris.qe.ShowResultSetMetaData; +import org.apache.doris.qe.StmtExecutor; +import org.apache.doris.qe.help.HelpModule; +import org.apache.doris.qe.help.HelpTopic; + +import com.google.common.base.Strings; +import com.google.common.collect.Lists; + +import java.util.List; + +/** + * Represents the command for HELP. 
+ */ +public class HelpCommand extends ShowCommand { + private static final List> EMPTY_SET = Lists.newArrayList(); + + private static final ShowResultSetMetaData TOPIC_META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("name", ScalarType.createVarchar(64))) + .addColumn(new Column("description", ScalarType.createVarchar(1000))) + .addColumn(new Column("example", ScalarType.createVarchar(1000))) + .build(); + private static final ShowResultSetMetaData CATEGORY_META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("source_category_name", ScalarType.createVarchar(64))) + .addColumn(new Column("name", ScalarType.createVarchar(64))) + .addColumn(new Column("is_it_category", ScalarType.createVarchar(1))) + .build(); + private static final ShowResultSetMetaData KEYWORD_META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("name", ScalarType.createVarchar(64))) + .addColumn(new Column("is_it_category", ScalarType.createVarchar(1))) + .build(); + + private final String mark; + + public HelpCommand(String mark) { + super(PlanType.HELP_COMMAND); + this.mark = mark; + } + + @Override + public ShowResultSet doRun(ConnectContext ctx, StmtExecutor executor) throws Exception { + if (Strings.isNullOrEmpty(mark)) { + throw new AnalysisException("Help empty info."); + } + HelpModule module = HelpModule.getInstance(); + ShowResultSet resultSet; + + // Get topic + HelpTopic topic = module.getTopic(mark); + // Get by Keyword + if (topic == null) { + List topics = module.listTopicByKeyword(mark); + if (topics.size() == 0) { + // assign to avoid code style problem + topic = null; + } else if (topics.size() == 1) { + topic = module.getTopic(topics.get(0)); + } else { + // Send topic list and category list + List> rows = Lists.newArrayList(); + for (String str : topics) { + rows.add(Lists.newArrayList(str, "N")); + } + List categories = module.listCategoryByName(mark); + for (String str : categories) { + rows.add(Lists.newArrayList(str, 
"Y")); + } + return new ShowResultSet(KEYWORD_META_DATA, rows); + } + } + if (topic != null) { + resultSet = new ShowResultSet(TOPIC_META_DATA, Lists.>newArrayList( + Lists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample()))); + } else { + List categories = module.listCategoryByName(mark); + if (categories.isEmpty()) { + // If no category match for this name, return + resultSet = new ShowResultSet(KEYWORD_META_DATA, EMPTY_SET); + } else if (categories.size() > 1) { + // Send category list + resultSet = new ShowResultSet(CATEGORY_META_DATA, + Lists.>newArrayList(categories)); + } else { + // Send topic list and sub-category list + List> rows = Lists.newArrayList(); + List topics = module.listTopicByCategory(categories.get(0)); + for (String str : topics) { + rows.add(Lists.newArrayList(str, "N")); + } + List subCategories = module.listCategoryByCategory(categories.get(0)); + for (String str : subCategories) { + rows.add(Lists.newArrayList(str, "Y")); + } + resultSet = new ShowResultSet(KEYWORD_META_DATA, rows); + } + } + return resultSet; + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitHelpCommand(this, context); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index 122e513a08cb57..72dad4abf1d9e8 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -72,6 +72,7 @@ import org.apache.doris.nereids.trees.plans.commands.DropWorkloadPolicyCommand; import org.apache.doris.nereids.trees.plans.commands.ExplainCommand; import org.apache.doris.nereids.trees.plans.commands.ExportCommand; +import org.apache.doris.nereids.trees.plans.commands.HelpCommand; import 
org.apache.doris.nereids.trees.plans.commands.LoadCommand; import org.apache.doris.nereids.trees.plans.commands.PauseJobCommand; import org.apache.doris.nereids.trees.plans.commands.PauseMTMVCommand; @@ -337,6 +338,10 @@ default R visitShowCreateProcedureCommand(ShowCreateProcedureCommand showCreateP return visitCommand(showCreateProcedureCommand, context); } + default R visitHelpCommand(HelpCommand helpCommand, C context) { + return visitCommand(helpCommand, context); + } + default R visitCreateViewCommand(CreateViewCommand createViewCommand, C context) { return visitCommand(createViewCommand, context); } diff --git a/regression-test/suites/nereids_p0/show/test_help_command.groovy b/regression-test/suites/nereids_p0/show/test_help_command.groovy new file mode 100644 index 00000000000000..21452ce3e46379 --- /dev/null +++ b/regression-test/suites/nereids_p0/show/test_help_command.groovy @@ -0,0 +1,34 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +suite("test_help_command", "query,help") { + try { + // Test the HELP command for a known topic + checkNereidsExecute("HELP 'CREATE TABLE';") + + // Test the HELP command for an unknown topic + checkNereidsExecute("HELP 'UNKNOWN_TOPIC';") + + // Test the HELP command for a keyword with multiple matches + checkNereidsExecute("HELP 'ALTER';") + } catch (Exception e) { + // Log any exceptions that occur during testing + log.error("Failed to execute HELP command", e) + throw e + } +} + From d41147f0998704e9d511540ce8f1a05a47596d18 Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Tue, 24 Dec 2024 13:06:43 +0530 Subject: [PATCH 80/82] [Enhancement] (nereids)implement showWaringErrorCountCommand in nereids (#45829) Issue Number: close #42751 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../nereids/parser/LogicalPlanBuilder.java | 8 +++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../ShowWarningErrorCountCommand.java | 64 +++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 ++ ...st_show_warning_error_count_command.groovy | 32 ++++++++++ 6 files changed, 111 insertions(+), 1 deletion(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowWarningErrorCountCommand.java create mode 100644 regression-test/suites/nereids_p0/show/test_show_warning_error_count_command.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index eba16aa6127388..7ca3e9f7b80a28 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -267,6 +267,7 @@ supportedShowStatement | SHOW CREATE MATERIALIZED VIEW mvName=identifier ON tableName=multipartIdentifier #showCreateMaterializedView | SHOW (WARNINGS | ERRORS) limitClause? 
#showWarningErrors + | SHOW COUNT LEFT_PAREN ASTERISK RIGHT_PAREN (WARNINGS | ERRORS) #showWarningErrorCount | SHOW BACKENDS #showBackends | SHOW REPLICA DISTRIBUTION FROM baseTableRef #showReplicaDistribution | SHOW FULL? TRIGGERS ((FROM | IN) database=multipartIdentifier)? wildWhere? #showTriggers @@ -339,7 +340,6 @@ unsupportedShowStatement | SHOW CATALOG name=identifier #showCatalog | SHOW FULL? (COLUMNS | FIELDS) (FROM | IN) tableName=multipartIdentifier ((FROM | IN) database=multipartIdentifier)? wildWhere? #showColumns - | SHOW COUNT LEFT_PAREN ASTERISK RIGHT_PAREN (WARNINGS | ERRORS) #showWaringErrorCount | SHOW LOAD WARNINGS ((((FROM | IN) database=multipartIdentifier)? wildWhere? limitClause?) | (ON url=STRING_LITERAL)) #showLoadWarings | SHOW STREAM? LOAD ((FROM | IN) database=multipartIdentifier)? wildWhere? diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 54717302493128..623d6537ccdceb 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -293,6 +293,7 @@ import org.apache.doris.nereids.DorisParser.ShowTriggersContext; import org.apache.doris.nereids.DorisParser.ShowVariablesContext; import org.apache.doris.nereids.DorisParser.ShowViewContext; +import org.apache.doris.nereids.DorisParser.ShowWarningErrorCountContext; import org.apache.doris.nereids.DorisParser.ShowWarningErrorsContext; import org.apache.doris.nereids.DorisParser.ShowWhitelistContext; import org.apache.doris.nereids.DorisParser.SimpleColumnDefContext; @@ -617,6 +618,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowTriggersCommand; import org.apache.doris.nereids.trees.plans.commands.ShowVariablesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowViewCommand; +import 
org.apache.doris.nereids.trees.plans.commands.ShowWarningErrorCountCommand; import org.apache.doris.nereids.trees.plans.commands.ShowWarningErrorsCommand; import org.apache.doris.nereids.trees.plans.commands.ShowWhiteListCommand; import org.apache.doris.nereids.trees.plans.commands.SyncCommand; @@ -5151,6 +5153,12 @@ public LogicalPlan visitAdminCheckTablets(AdminCheckTabletsContext ctx) { return new AdminCheckTabletsCommand(tabletIdLists, properties); } + @Override + public LogicalPlan visitShowWarningErrorCount(ShowWarningErrorCountContext ctx) { + boolean isWarning = ctx.WARNINGS() != null; + return new ShowWarningErrorCountCommand(isWarning); + } + @Override public LogicalPlan visitShowStatus(ShowStatusContext ctx) { String scope = null; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 06d2287de537bf..37216fa652ba08 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -251,6 +251,7 @@ public enum PlanType { SHOW_AUTHORS_COMMAND, SHOW_VIEW_COMMAND, SHOW_WARNING_ERRORS_COMMAND, + SHOW_WARNING_ERROR_COUNT_COMMAND, SHOW_WHITE_LIST_COMMAND, SHOW_TABLETS_BELONG_COMMAND, SYNC_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowWarningErrorCountCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowWarningErrorCountCommand.java new file mode 100644 index 00000000000000..ef1d36b30fc4e0 --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ShowWarningErrorCountCommand.java @@ -0,0 +1,64 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.PrimitiveType; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ShowResultSet; +import org.apache.doris.qe.ShowResultSetMetaData; +import org.apache.doris.qe.StmtExecutor; + +import com.google.common.collect.Lists; + +import java.util.List; + +/** + * Represents the command for SHOW COUNT(*) WARNINGS | ERRORS. 
+ */ +public class ShowWarningErrorCountCommand extends ShowCommand { + private static final ShowResultSetMetaData META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("COUNT", ScalarType.createType(PrimitiveType.BIGINT))) + .build(); + private final boolean isWarning; + + public ShowWarningErrorCountCommand(boolean isWarning) { + super(PlanType.SHOW_WARNING_ERROR_COUNT_COMMAND); + this.isWarning = isWarning; + } + + @Override + public ShowResultSet doRun(ConnectContext ctx, StmtExecutor executor) { + List> rowSet = Lists.newArrayList(); + + return new ShowResultSet(META_DATA, rowSet); + } + + public boolean isWarning() { + return isWarning; + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitShowWarningErrorCountCommand(this, context); + } +} diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index 72dad4abf1d9e8..d6ede656542356 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -137,6 +137,7 @@ import org.apache.doris.nereids.trees.plans.commands.ShowTriggersCommand; import org.apache.doris.nereids.trees.plans.commands.ShowVariablesCommand; import org.apache.doris.nereids.trees.plans.commands.ShowViewCommand; +import org.apache.doris.nereids.trees.plans.commands.ShowWarningErrorCountCommand; import org.apache.doris.nereids.trees.plans.commands.ShowWarningErrorsCommand; import org.apache.doris.nereids.trees.plans.commands.ShowWhiteListCommand; import org.apache.doris.nereids.trees.plans.commands.SyncCommand; @@ -310,6 +311,10 @@ default R visitCallCommand(CallCommand callCommand, C context) { return visitCommand(callCommand, context); } + default R visitShowWarningErrorCountCommand(ShowWarningErrorCountCommand 
showWarnErrorCountCommand, C context) { + return visitCommand(showWarnErrorCountCommand, context); + } + default R visitShowSyncJobCommand(ShowSyncJobCommand showSyncJobCommand, C context) { return visitCommand(showSyncJobCommand, context); } diff --git a/regression-test/suites/nereids_p0/show/test_show_warning_error_count_command.groovy b/regression-test/suites/nereids_p0/show/test_show_warning_error_count_command.groovy new file mode 100644 index 00000000000000..d97426c8822fd8 --- /dev/null +++ b/regression-test/suites/nereids_p0/show/test_show_warning_error_count_command.groovy @@ -0,0 +1,32 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_show_warning_error_count_command", "nereids_p0") { + + try { + // Generate some warnings + sql("SELECT 1/0;") // This should generate a warning or error? 
+ + // Test SHOW COUNT(*) WARNINGS + checkNereidsExecute("""SHOW COUNT(*) WARNINGS""") + + // Test SHOW COUNT(*) ERRORS + checkNereidsExecute("""SHOW COUNT(*) ERRORS""") + } finally { + // Clean up if needed + } +} From d0f09fb36f638ed312fdf23011d3ed9b3a35737d Mon Sep 17 00:00:00 2001 From: Sridhar R Manikarnike Date: Tue, 24 Dec 2024 13:06:59 +0530 Subject: [PATCH 81/82] [Enhancement] (nereids)implement DropStoragePolicyCommand in nereids (#44825) Issue Number: close #42626 --- .../org/apache/doris/nereids/DorisParser.g4 | 2 +- .../nereids/parser/LogicalPlanBuilder.java | 9 +++ .../doris/nereids/trees/plans/PlanType.java | 1 + .../commands/DropStoragePolicyCommand.java | 68 +++++++++++++++++++ .../trees/plans/visitor/CommandVisitor.java | 5 ++ .../org/apache/doris/policy/PolicyMgr.java | 16 +++-- .../test_drop_storage_policy_command.groovy | 63 +++++++++++++++++ 7 files changed, 157 insertions(+), 7 deletions(-) create mode 100644 fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropStoragePolicyCommand.java create mode 100644 regression-test/suites/nereids_p0/test_drop_storage_policy_command.groovy diff --git a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 index 7ca3e9f7b80a28..f5730bddd549ee 100644 --- a/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 +++ b/fe/fe-core/src/main/antlr4/org/apache/doris/nereids/DorisParser.g4 @@ -220,6 +220,7 @@ supportedDropStatement | DROP ROLE (IF EXISTS)? name=identifier #dropRole | DROP SQL_BLOCK_RULE (IF EXISTS)? identifierSeq #dropSqlBlockRule | DROP USER (IF EXISTS)? userIdentify #dropUser + | DROP STORAGE POLICY (IF EXISTS)? name=identifier #dropStoragePolicy | DROP WORKLOAD GROUP (IF EXISTS)? name=identifierOrText #dropWorkloadGroup | DROP CATALOG (IF EXISTS)? 
name=identifier #dropCatalog | DROP FILE name=STRING_LITERAL @@ -703,7 +704,6 @@ unsupportedDropStatement | DROP ROW POLICY (IF EXISTS)? policyName=identifier ON tableName=multipartIdentifier (FOR (userIdentify | ROLE roleName=identifier))? #dropRowPolicy - | DROP STORAGE POLICY (IF EXISTS)? name=identifier #dropStoragePolicy | DROP STAGE (IF EXISTS)? name=identifier #dropStage ; diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java index 623d6537ccdceb..08a5078214da81 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/parser/LogicalPlanBuilder.java @@ -130,6 +130,7 @@ import org.apache.doris.nereids.DorisParser.DropRepositoryContext; import org.apache.doris.nereids.DorisParser.DropRoleContext; import org.apache.doris.nereids.DorisParser.DropSqlBlockRuleContext; +import org.apache.doris.nereids.DorisParser.DropStoragePolicyContext; import org.apache.doris.nereids.DorisParser.DropUserContext; import org.apache.doris.nereids.DorisParser.DropWorkloadGroupContext; import org.apache.doris.nereids.DorisParser.DropWorkloadPolicyContext; @@ -547,6 +548,7 @@ import org.apache.doris.nereids.trees.plans.commands.DropRepositoryCommand; import org.apache.doris.nereids.trees.plans.commands.DropRoleCommand; import org.apache.doris.nereids.trees.plans.commands.DropSqlBlockRuleCommand; +import org.apache.doris.nereids.trees.plans.commands.DropStoragePolicyCommand; import org.apache.doris.nereids.trees.plans.commands.DropUserCommand; import org.apache.doris.nereids.trees.plans.commands.DropWorkloadGroupCommand; import org.apache.doris.nereids.trees.plans.commands.DropWorkloadPolicyCommand; @@ -5015,6 +5017,13 @@ public LogicalPlan visitCreateEncryptkey(CreateEncryptkeyContext ctx) { stripQuotes(ctx.STRING_LITERAL().getText())); } + @Override + public LogicalPlan 
visitDropStoragePolicy(DropStoragePolicyContext ctx) { + String policyName = ctx.name.getText(); + boolean ifExists = ctx.EXISTS() != null; + return new DropStoragePolicyCommand(policyName, ifExists); + } + @Override public LogicalPlan visitDropEncryptkey(DropEncryptkeyContext ctx) { List nameParts = visitMultipartIdentifier(ctx.name); diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java index 37216fa652ba08..76afbbdabf0ab6 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/PlanType.java @@ -201,6 +201,7 @@ public enum PlanType { EXECUTE_COMMAND, DROP_SQL_BLOCK_RULE_COMMAND, DROP_USER_COMMAND, + DROP_STORAGE_POLICY_COMMAND, DROP_WORKLOAD_GROUP_NAME, DROP_WORKLOAD_POLICY_COMMAND, ADMIN_SET_TABLE_STATUS_COMMAND, diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropStoragePolicyCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropStoragePolicyCommand.java new file mode 100644 index 00000000000000..d5175eb2a6179b --- /dev/null +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/DropStoragePolicyCommand.java @@ -0,0 +1,68 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.nereids.trees.plans.commands; + +import org.apache.doris.catalog.Env; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.nereids.trees.plans.PlanType; +import org.apache.doris.nereids.trees.plans.visitor.PlanVisitor; +import org.apache.doris.policy.DropPolicyLog; +import org.apache.doris.policy.PolicyTypeEnum; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.StmtExecutor; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +/** + * drop storage policy command + */ +public class DropStoragePolicyCommand extends DropCommand { + public static final Logger LOG = LogManager.getLogger(DropStoragePolicyCommand.class); + private final boolean ifExists; + private final String policyName; + + /** + * constructor + */ + public DropStoragePolicyCommand(String policyName, boolean ifExists) { + super(PlanType.DROP_STORAGE_POLICY_COMMAND); + this.policyName = policyName; + this.ifExists = ifExists; + } + + @Override + public void doRun(ConnectContext ctx, StmtExecutor executor) throws Exception { + if (!Env.getCurrentEnv().getAccessManager() + .checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, + PrivPredicate.ADMIN.getPrivs().toString()); + } + // Drop the storage policy + DropPolicyLog dropPolicyLog = new DropPolicyLog(PolicyTypeEnum.STORAGE, policyName); + 
Env.getCurrentEnv().getPolicyMgr().dropPolicy(dropPolicyLog, ifExists); + } + + @Override + public R accept(PlanVisitor visitor, C context) { + return visitor.visitDropStoragePolicyCommand(this, context); + } +} + diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java index d6ede656542356..380e21614bafd2 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java +++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/visitor/CommandVisitor.java @@ -67,6 +67,7 @@ import org.apache.doris.nereids.trees.plans.commands.DropRepositoryCommand; import org.apache.doris.nereids.trees.plans.commands.DropRoleCommand; import org.apache.doris.nereids.trees.plans.commands.DropSqlBlockRuleCommand; +import org.apache.doris.nereids.trees.plans.commands.DropStoragePolicyCommand; import org.apache.doris.nereids.trees.plans.commands.DropUserCommand; import org.apache.doris.nereids.trees.plans.commands.DropWorkloadGroupCommand; import org.apache.doris.nereids.trees.plans.commands.DropWorkloadPolicyCommand; @@ -412,6 +413,10 @@ default R visitSetDefaultStorageVault(SetDefaultStorageVaultCommand setDefaultSt return visitCommand(setDefaultStorageVaultCommand, context); } + default R visitDropStoragePolicyCommand(DropStoragePolicyCommand dropStoragePolicyCommand, C context) { + return visitCommand(dropStoragePolicyCommand, context); + } + default R visitRefreshCatalogCommand(RefreshCatalogCommand refreshCatalogCommand, C context) { return visitCommand(refreshCatalogCommand, context); } diff --git a/fe/fe-core/src/main/java/org/apache/doris/policy/PolicyMgr.java b/fe/fe-core/src/main/java/org/apache/doris/policy/PolicyMgr.java index 6e8bd4f08cb2f7..8e639b36a25427 100644 --- a/fe/fe-core/src/main/java/org/apache/doris/policy/PolicyMgr.java +++ 
b/fe/fe-core/src/main/java/org/apache/doris/policy/PolicyMgr.java @@ -171,11 +171,7 @@ public void addPolicy(Policy policy) throws UserException { } } - /** - * Drop policy through stmt. - **/ - public void dropPolicy(DropPolicyStmt stmt) throws DdlException, AnalysisException { - DropPolicyLog dropPolicyLog = DropPolicyLog.fromDropStmt(stmt); + public void dropPolicy(DropPolicyLog dropPolicyLog, boolean ifExists) throws DdlException, AnalysisException { if (dropPolicyLog.getType() == PolicyTypeEnum.STORAGE) { List databases = Env.getCurrentEnv().getInternalCatalog().getDbs(); for (Database db : databases) { @@ -198,7 +194,7 @@ public void dropPolicy(DropPolicyStmt stmt) throws DdlException, AnalysisExcepti writeLock(); try { if (!existPolicy(dropPolicyLog)) { - if (stmt.isIfExists()) { + if (ifExists) { return; } throw new DdlException("the policy " + dropPolicyLog.getPolicyName() + " not exist"); @@ -210,6 +206,14 @@ public void dropPolicy(DropPolicyStmt stmt) throws DdlException, AnalysisExcepti } } + /** + * Drop policy through stmt. + **/ + public void dropPolicy(DropPolicyStmt stmt) throws DdlException, AnalysisException { + DropPolicyLog dropPolicyLog = DropPolicyLog.fromDropStmt(stmt); + dropPolicy(dropPolicyLog, stmt.isIfExists()); + } + /** * Check whether the policy exist. * diff --git a/regression-test/suites/nereids_p0/test_drop_storage_policy_command.groovy b/regression-test/suites/nereids_p0/test_drop_storage_policy_command.groovy new file mode 100644 index 00000000000000..78e0e684b49699 --- /dev/null +++ b/regression-test/suites/nereids_p0/test_drop_storage_policy_command.groovy @@ -0,0 +1,63 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +suite("test_drop_storage_policy_command", "drop,storage_policy") { + String policyName = "test_drop_storage_policy"; + String resourceName = "test_drop_storage_policy_resource"; + try { + // Drop existing storage policy and resource if they exist before creating new ones + try_sql("DROP STORAGE POLICY IF EXISTS ${policyName}") + try_sql("DROP RESOURCE IF EXISTS ${resourceName}") + // Create a new resource to be used in the storage policy + sql """ + CREATE RESOURCE IF NOT EXISTS "${resourceName}" + PROPERTIES( + "type"="s3", + "AWS_ENDPOINT" = "${getS3Endpoint()}", + "AWS_REGION" = "${getS3Region()}", + "AWS_ROOT_PATH" = "regression/cooldown", + "AWS_ACCESS_KEY" = "${getS3AK()}", + "AWS_SECRET_KEY" = "${getS3SK()}", + "AWS_MAX_CONNECTIONS" = "50", + "AWS_REQUEST_TIMEOUT_MS" = "3000", + "AWS_CONNECTION_TIMEOUT_MS" = "1000", + "AWS_BUCKET" = "${getS3BucketName()}", + "s3_validity_check" = "false" + ); + """ + + // Create a new storage policy to test the SHOW STORAGE POLICY command + sql """ + CREATE STORAGE POLICY IF NOT EXISTS ${policyName} + PROPERTIES( + "storage_resource" = "${resourceName}", + "cooldown_ttl" = "300" + ) + """ + + checkNereidsExecute("DROP STORAGE POLICY IF EXISTS ${policyName}") + + } catch (Exception e) { + // Log any exceptions that occur during testing + log.error("Failed to execute SHOW STORAGE POLICY command", e) + throw e + } finally { + // Clean up by 
dropping the storage policy and resource if they still exist + try_sql("DROP STORAGE POLICY IF EXISTS ${policyName}") + try_sql("DROP RESOURCE IF EXISTS ${resourceName}") + } +} From c45d46854de37fd23c538c664029b4557e50236c Mon Sep 17 00:00:00 2001 From: Gavin Chou Date: Tue, 24 Dec 2024 15:47:59 +0800 Subject: [PATCH 82/82] [opt](recycler) Improve robustness and observability (#45617) 1. Fix delete non-existed object, the original impl. does not handle correctly 2. Add recycle lag indicators: bvars for index, partition, rowset and txn 1. `recycle_index_earlest_ts_${instance_id}` dropped table/mv 2. `recycle_partition_earlest_ts_${instance_id}` dropped partitions 3. `recycle_rowset_earlest_ts_${instance_id}` compacted rowset 4. `recycle_tmp_rowset_earlest_ts_${instance_id}` aborted transactions tmp data 5. `recycle_expired_txn_label_earlest_ts_${instance_id}` expired labels and transactions 4. Add retry for scan_and_recycle() to prevent KV error like "Request future version" 6. Fix recycle delete instance may leak object data: we have to delete data first before delete KV 7. 
Improve code readability: remove duplicated code and add more comments --- cloud/src/common/bvars.cpp | 160 +++++------ cloud/src/common/bvars.h | 96 +++---- cloud/src/common/config.h | 2 + cloud/src/meta-service/meta_service.cpp | 1 + cloud/src/recycler/obj_storage_client.h | 10 +- cloud/src/recycler/recycler.cpp | 341 ++++++++++++++++-------- cloud/src/recycler/recycler.h | 20 +- cloud/src/recycler/s3_accessor.cpp | 42 ++- cloud/src/recycler/s3_obj_client.cpp | 7 +- 9 files changed, 398 insertions(+), 281 deletions(-) diff --git a/cloud/src/common/bvars.cpp b/cloud/src/common/bvars.cpp index 746f109ac6d7fd..a0b0a2da9c213e 100644 --- a/cloud/src/common/bvars.cpp +++ b/cloud/src/common/bvars.cpp @@ -20,6 +20,8 @@ #include #include +// clang-format off + // meta-service's bvars BvarLatencyRecorderWithTag g_bvar_ms_begin_txn("ms", "begin_txn"); BvarLatencyRecorderWithTag g_bvar_ms_precommit_txn("ms", "precommit_txn"); @@ -71,23 +73,27 @@ BvarLatencyRecorderWithTag g_bvar_ms_get_copy_files("ms", "get_copy_files"); BvarLatencyRecorderWithTag g_bvar_ms_filter_copy_files("ms", "filter_copy_files"); BvarLatencyRecorderWithTag g_bvar_ms_update_delete_bitmap("ms", "update_delete_bitmap"); BvarLatencyRecorderWithTag g_bvar_ms_get_delete_bitmap("ms", "get_delete_bitmap"); -BvarLatencyRecorderWithTag g_bvar_ms_get_delete_bitmap_update_lock("ms", - "get_delete_bitmap_update_lock"); +BvarLatencyRecorderWithTag g_bvar_ms_get_delete_bitmap_update_lock("ms", "get_delete_bitmap_update_lock"); BvarLatencyRecorderWithTag g_bvar_ms_remove_delete_bitmap("ms", "remove_delete_bitmap"); -BvarLatencyRecorderWithTag g_bvar_ms_remove_delete_bitmap_update_lock( - "ms", "remove_delete_bitmap_update_lock"); +BvarLatencyRecorderWithTag g_bvar_ms_remove_delete_bitmap_update_lock("ms", "remove_delete_bitmap_update_lock"); BvarLatencyRecorderWithTag g_bvar_ms_get_instance("ms", "get_instance"); BvarLatencyRecorderWithTag g_bvar_ms_get_rl_task_commit_attach("ms", "get_rl_task_commit_attach"); 
BvarLatencyRecorderWithTag g_bvar_ms_reset_rl_progress("ms", "reset_rl_progress"); BvarLatencyRecorderWithTag g_bvar_ms_get_txn_id("ms", "get_txn_id"); - BvarLatencyRecorderWithTag g_bvar_ms_start_tablet_job("ms", "start_tablet_job"); BvarLatencyRecorderWithTag g_bvar_ms_finish_tablet_job("ms", "finish_tablet_job"); BvarLatencyRecorderWithTag g_bvar_ms_get_cluster_status("ms", "get_cluster_status"); BvarLatencyRecorderWithTag g_bvar_ms_set_cluster_status("ms", "set_cluster_status"); - BvarLatencyRecorderWithTag g_bvar_ms_check_kv("ms", "check_kv"); +// recycler's bvars +// TODO: use mbvar for per instance, https://github.com/apache/brpc/blob/master/docs/cn/mbvar_c++.md +BvarStatusWithTag g_bvar_recycler_recycle_index_earlest_ts("recycler", "recycle_index_earlest_ts"); +BvarStatusWithTag g_bvar_recycler_recycle_partition_earlest_ts("recycler", "recycle_partition_earlest_ts"); +BvarStatusWithTag g_bvar_recycler_recycle_rowset_earlest_ts("recycler", "recycle_rowset_earlest_ts"); +BvarStatusWithTag g_bvar_recycler_recycle_tmp_rowset_earlest_ts("recycler", "recycle_tmp_rowset_earlest_ts"); +BvarStatusWithTag g_bvar_recycler_recycle_expired_txn_label_earlest_ts("recycler", "recycle_expired_txn_label_earlest_ts"); + // txn_kv's bvars bvar::LatencyRecorder g_bvar_txn_kv_get("txn_kv", "get"); bvar::LatencyRecorder g_bvar_txn_kv_range_get("txn_kv", "range_get"); @@ -101,107 +107,65 @@ bvar::LatencyRecorder g_bvar_txn_kv_range_remove("txn_kv", "range_remove"); bvar::LatencyRecorder g_bvar_txn_kv_get_read_version("txn_kv", "get_read_version"); bvar::LatencyRecorder g_bvar_txn_kv_get_committed_version("txn_kv", "get_committed_version"); bvar::LatencyRecorder g_bvar_txn_kv_batch_get("txn_kv", "batch_get"); - bvar::Adder g_bvar_txn_kv_get_count_normalized("txn_kv", "get_count_normalized"); - bvar::Adder g_bvar_txn_kv_commit_error_counter; -bvar::Window > g_bvar_txn_kv_commit_error_counter_minute( - "txn_kv", "commit_error", &g_bvar_txn_kv_commit_error_counter, 60); - 
+bvar::Window > g_bvar_txn_kv_commit_error_counter_minute("txn_kv", "commit_error", &g_bvar_txn_kv_commit_error_counter, 60); bvar::Adder g_bvar_txn_kv_commit_conflict_counter; -bvar::Window > g_bvar_txn_kv_commit_conflict_counter_minute( - "txn_kv", "commit_conflict", &g_bvar_txn_kv_commit_conflict_counter, 60); +bvar::Window > g_bvar_txn_kv_commit_conflict_counter_minute("txn_kv", "commit_conflict", &g_bvar_txn_kv_commit_conflict_counter, 60); +// fdb's bvars const int64_t BVAR_FDB_INVALID_VALUE = -99999999L; bvar::Status g_bvar_fdb_client_count("fdb_client_count", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_configuration_coordinators_count( - "fdb_configuration_coordinators_count", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_configuration_usable_regions("fdb_configuration_usable_regions", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_coordinators_unreachable_count( - "fdb_coordinators_unreachable_count", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_fault_tolerance_count("fdb_fault_tolerance_count", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_average_partition_size_bytes( - "fdb_data_average_partition_size_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_log_server_space_bytes("fdb_data_log_server_space_bytes", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_moving_data_highest_priority( - "fdb_data_moving_data_highest_priority", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_moving_data_in_flight_bytes( - "fdb_data_moving_data_in_flight_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_moving_data_in_queue_bytes( - "fdb_data_moving_data_in_queue_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_moving_total_written_bytes( - "fdb_data_moving_total_written_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_partition_count("fdb_data_partition_count", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_storage_server_space_bytes( - 
"fdb_data_storage_server_space_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_state_min_replicas_remaining( - "fdb_data_state_min_replicas_remaining", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_total_kv_size_bytes("fdb_data_total_kv_size_bytes", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_data_total_disk_used_bytes("fdb_data_total_disk_used_bytes", - BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_configuration_coordinators_count("fdb_configuration_coordinators_count", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_configuration_usable_regions("fdb_configuration_usable_regions", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_coordinators_unreachable_count("fdb_coordinators_unreachable_count", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_fault_tolerance_count("fdb_fault_tolerance_count", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_average_partition_size_bytes("fdb_data_average_partition_size_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_log_server_space_bytes("fdb_data_log_server_space_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_moving_data_highest_priority("fdb_data_moving_data_highest_priority", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_moving_data_in_flight_bytes("fdb_data_moving_data_in_flight_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_moving_data_in_queue_bytes("fdb_data_moving_data_in_queue_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_moving_total_written_bytes("fdb_data_moving_total_written_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_partition_count("fdb_data_partition_count", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_storage_server_space_bytes("fdb_data_storage_server_space_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_state_min_replicas_remaining("fdb_data_state_min_replicas_remaining", BVAR_FDB_INVALID_VALUE); +bvar::Status 
g_bvar_fdb_data_total_kv_size_bytes("fdb_data_total_kv_size_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_data_total_disk_used_bytes("fdb_data_total_disk_used_bytes", BVAR_FDB_INVALID_VALUE); bvar::Status g_bvar_fdb_generation("fdb_generation", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_incompatible_connections("fdb_incompatible_connections", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_latency_probe_transaction_start_ns( - "fdb_latency_probe_transaction_start_ns", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_latency_probe_commit_ns("fdb_latency_probe_commit_ns", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_latency_probe_read_ns("fdb_latency_probe_read_ns", - BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_incompatible_connections("fdb_incompatible_connections", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_latency_probe_transaction_start_ns("fdb_latency_probe_transaction_start_ns", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_latency_probe_commit_ns("fdb_latency_probe_commit_ns", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_latency_probe_read_ns("fdb_latency_probe_read_ns", BVAR_FDB_INVALID_VALUE); bvar::Status g_bvar_fdb_machines_count("fdb_machines_count", BVAR_FDB_INVALID_VALUE); bvar::Status g_bvar_fdb_process_count("fdb_process_count", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_qos_worst_data_lag_storage_server_ns( - "fdb_qos_worst_data_lag_storage_server_ns", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_qos_worst_durability_lag_storage_server_ns( - "fdb_qos_worst_durability_lag_storage_server_ns", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_qos_worst_log_server_queue_bytes( - "fdb_qos_worst_log_server_queue_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_qos_worst_storage_server_queue_bytes( - "fdb_qos_worst_storage_server_queue_bytes", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_conflict_rate_hz("fdb_workload_conflict_rate_hz", - BVAR_FDB_INVALID_VALUE); 
-bvar::Status g_bvar_fdb_workload_location_rate_hz("fdb_workload_location_rate_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_keys_read_hz("fdb_workload_keys_read_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_read_bytes_hz("fdb_workload_read_bytes_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_read_rate_hz("fdb_workload_read_rate_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_write_rate_hz("fdb_workload_write_rate_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_written_bytes_hz("fdb_workload_written_bytes_hz", - BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_transactions_started_hz( - "fdb_workload_transactions_started_hz", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_transactions_committed_hz( - "fdb_workload_transactions_committed_hz", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_workload_transactions_rejected_hz( - "fdb_workload_transactions_rejected_hz", BVAR_FDB_INVALID_VALUE); -bvar::Status g_bvar_fdb_client_thread_busyness_percent( - "fdb_client_thread_busyness_percent", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_qos_worst_data_lag_storage_server_ns("fdb_qos_worst_data_lag_storage_server_ns", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_qos_worst_durability_lag_storage_server_ns("fdb_qos_worst_durability_lag_storage_server_ns", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_qos_worst_log_server_queue_bytes("fdb_qos_worst_log_server_queue_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_qos_worst_storage_server_queue_bytes("fdb_qos_worst_storage_server_queue_bytes", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_conflict_rate_hz("fdb_workload_conflict_rate_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_location_rate_hz("fdb_workload_location_rate_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_keys_read_hz("fdb_workload_keys_read_hz", BVAR_FDB_INVALID_VALUE); 
+bvar::Status g_bvar_fdb_workload_read_bytes_hz("fdb_workload_read_bytes_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_read_rate_hz("fdb_workload_read_rate_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_write_rate_hz("fdb_workload_write_rate_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_written_bytes_hz("fdb_workload_written_bytes_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_transactions_started_hz("fdb_workload_transactions_started_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_transactions_committed_hz("fdb_workload_transactions_committed_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_workload_transactions_rejected_hz("fdb_workload_transactions_rejected_hz", BVAR_FDB_INVALID_VALUE); +bvar::Status g_bvar_fdb_client_thread_busyness_percent("fdb_client_thread_busyness_percent", BVAR_FDB_INVALID_VALUE); // checker's bvars -BvarStatusWithTag g_bvar_checker_num_scanned("checker", "num_scanned"); -BvarStatusWithTag g_bvar_checker_num_scanned_with_segment("checker", - "num_scanned_with_segment"); -BvarStatusWithTag g_bvar_checker_num_check_failed("checker", "num_check_failed"); -BvarStatusWithTag g_bvar_checker_check_cost_s("checker", "check_cost_seconds"); -BvarStatusWithTag g_bvar_checker_enqueue_cost_s("checker", "enqueue_cost_seconds"); -BvarStatusWithTag g_bvar_checker_last_success_time_ms("checker", "last_success_time_ms"); -BvarStatusWithTag g_bvar_checker_instance_volume("checker", "instance_volume"); -BvarStatusWithTag g_bvar_inverted_checker_num_scanned("checker", "num_inverted_scanned"); -BvarStatusWithTag g_bvar_inverted_checker_num_check_failed("checker", - "num_inverted_check_failed"); +BvarStatusWithTag g_bvar_checker_num_scanned("checker", "num_scanned"); +BvarStatusWithTag g_bvar_checker_num_scanned_with_segment("checker", "num_scanned_with_segment"); +BvarStatusWithTag g_bvar_checker_num_check_failed("checker", "num_check_failed"); +BvarStatusWithTag 
g_bvar_checker_check_cost_s("checker", "check_cost_seconds"); +BvarStatusWithTag g_bvar_checker_enqueue_cost_s("checker", "enqueue_cost_seconds"); +BvarStatusWithTag g_bvar_checker_last_success_time_ms("checker", "last_success_time_ms"); +BvarStatusWithTag g_bvar_checker_instance_volume("checker", "instance_volume"); +BvarStatusWithTag g_bvar_inverted_checker_num_scanned("checker", "num_inverted_scanned"); +BvarStatusWithTag g_bvar_inverted_checker_num_check_failed("checker", "num_inverted_check_failed"); +BvarStatusWithTag g_bvar_inverted_checker_leaked_delete_bitmaps("checker", "leaked_delete_bitmaps"); +BvarStatusWithTag g_bvar_inverted_checker_abnormal_delete_bitmaps("checker", "abnormal_delete_bitmaps"); +BvarStatusWithTag g_bvar_inverted_checker_delete_bitmaps_scanned("checker", "delete_bitmap_keys_scanned"); -BvarStatusWithTag g_bvar_inverted_checker_leaked_delete_bitmaps("checker", - "leaked_delete_bitmaps"); -BvarStatusWithTag g_bvar_inverted_checker_abnormal_delete_bitmaps( - "checker", "abnormal_delete_bitmaps"); -BvarStatusWithTag g_bvar_inverted_checker_delete_bitmaps_scanned( - "checker", "delete_bitmap_keys_scanned"); \ No newline at end of file +// clang-format on diff --git a/cloud/src/common/bvars.h b/cloud/src/common/bvars.h index d0ad2e97957ae6..93340a6c0d291f 100644 --- a/cloud/src/common/bvars.h +++ b/cloud/src/common/bvars.h @@ -26,34 +26,53 @@ #include #include #include - -class BvarLatencyRecorderWithTag { +#include + +/** + * Manage bvars that with similar names (identical prefix) + * ${module}_${name}_${tag} + * where `tag` is added automatically when calling `get` or `put` + */ +template +class BvarWithTag { public: - BvarLatencyRecorderWithTag(std::string module, std::string name) + BvarWithTag(std::string module, std::string name) : module_(std::move(module)), name_(std::move(name)) {} - void put(const std::string& tag, int64_t value) { - std::shared_ptr instance = nullptr; + template + requires std::is_integral_v + void put(const 
std::string& tag, ValType value) { + std::shared_ptr instance = nullptr; { std::lock_guard l(mutex_); auto it = bvar_map_.find(tag); if (it == bvar_map_.end()) { - instance = std::make_shared(module_, name_ + "_" + tag); + instance = std::make_shared(module_, name_ + "_" + tag, ValType()); bvar_map_[tag] = instance; } else { instance = it->second; } } - (*instance) << value; + // FIXME(gavin): check bvar::Adder and more + if constexpr (std::is_same_v) { + (*instance) << value; + } else if constexpr (is_status) { + instance->set_value(value); + } else { + // This branch mean to be unreachable, add an assert(false) here to + // prevent missing branch match. + // Postpone deduction of static_assert by evaluating sizeof(T) + static_assert(!sizeof(Bvar), "all types must be matched with if constexpr"); + } } - std::shared_ptr get(const std::string& tag) { - std::shared_ptr instance = nullptr; + std::shared_ptr get(const std::string& tag) { + std::shared_ptr instance = nullptr; std::lock_guard l(mutex_); auto it = bvar_map_.find(tag); if (it == bvar_map_.end()) { - instance = std::make_shared(module_, name_ + "_" + tag); + instance = std::make_shared(module_, name_ + "_" + tag); bvar_map_[tag] = instance; return instance; } @@ -69,54 +88,14 @@ class BvarLatencyRecorderWithTag { bthread::Mutex mutex_; std::string module_; std::string name_; - std::map> bvar_map_; + std::map> bvar_map_; }; -template -class BvarStatusWithTag { -public: - BvarStatusWithTag(std::string module, std::string name) - : module_(std::move(module)), name_(std::move(name)) {} - - void put(const std::string& tag, T value) { - std::shared_ptr> instance = nullptr; - { - std::lock_guard l(mutex_); - auto it = bvar_map_.find(tag); - if (it == bvar_map_.end()) { - instance = std::make_shared>(module_, name_ + "_" + tag, T()); - bvar_map_[tag] = instance; - } else { - instance = it->second; - } - } - (*instance).set_value(value); - } - - std::shared_ptr> get(const std::string& tag) { - std::shared_ptr> 
instance = nullptr; - std::lock_guard l(mutex_); - - auto it = bvar_map_.find(tag); - if (it == bvar_map_.end()) { - instance = std::make_shared>(module_, name_ + "_" + tag); - bvar_map_[tag] = instance; - return instance; - } - return it->second; - } - - void remove(const std::string& tag) { - std::lock_guard l(mutex_); - bvar_map_.erase(tag); - } +using BvarLatencyRecorderWithTag = BvarWithTag; -private: - bthread::Mutex mutex_; - std::string module_; - std::string name_; - std::map>> bvar_map_; -}; +template + requires std::is_integral_v +using BvarStatusWithTag = BvarWithTag, true>; // meta-service's bvars extern BvarLatencyRecorderWithTag g_bvar_ms_begin_txn; @@ -182,6 +161,13 @@ extern BvarLatencyRecorderWithTag g_bvar_ms_reset_rl_progress; extern BvarLatencyRecorderWithTag g_bvar_ms_get_txn_id; extern BvarLatencyRecorderWithTag g_bvar_ms_check_kv; +// recycler's bvars +extern BvarStatusWithTag g_bvar_recycler_recycle_index_earlest_ts; +extern BvarStatusWithTag g_bvar_recycler_recycle_partition_earlest_ts; +extern BvarStatusWithTag g_bvar_recycler_recycle_rowset_earlest_ts; +extern BvarStatusWithTag g_bvar_recycler_recycle_tmp_rowset_earlest_ts; +extern BvarStatusWithTag g_bvar_recycler_recycle_expired_txn_label_earlest_ts; + // txn_kv's bvars extern bvar::LatencyRecorder g_bvar_txn_kv_get; extern bvar::LatencyRecorder g_bvar_txn_kv_range_get; diff --git a/cloud/src/common/config.h b/cloud/src/common/config.h index 03e2b63c72a7f6..4f3c49ee98dfe8 100644 --- a/cloud/src/common/config.h +++ b/cloud/src/common/config.h @@ -66,9 +66,11 @@ CONF_mInt64(dropped_partition_retention_seconds, "10800"); // 3h CONF_Strings(recycle_whitelist, ""); // Comma seprated list // These instances will not be recycled, only effective when whitelist is empty. 
CONF_Strings(recycle_blacklist, ""); // Comma seprated list +// IO worker thread pool concurrency: object list, delete CONF_mInt32(instance_recycler_worker_pool_size, "32"); CONF_Bool(enable_checker, "false"); // The parallelism for parallel recycle operation +// s3_producer_pool recycle_tablet_pool, delete single object in this pool CONF_Int32(recycle_pool_parallelism, "40"); // Currently only used for recycler test CONF_Bool(enable_inverted_check, "false"); diff --git a/cloud/src/meta-service/meta_service.cpp b/cloud/src/meta-service/meta_service.cpp index 26c8c30110a1db..4967750762da48 100644 --- a/cloud/src/meta-service/meta_service.cpp +++ b/cloud/src/meta-service/meta_service.cpp @@ -1183,6 +1183,7 @@ void MetaServiceImpl::commit_rowset(::google::protobuf::RpcController* controlle << ", rowset_id=" << rowset_id << ", rowset_meta_bytes=" << rowset_meta.ByteSizeLong() << ", segment_key_bounds_bytes=" << segment_key_bounds_bytes + << ", num_segments=" << rowset_meta.num_segments() << ", rowset_meta=" << rowset_meta.ShortDebugString(); } code = cast_as(err); diff --git a/cloud/src/recycler/obj_storage_client.h b/cloud/src/recycler/obj_storage_client.h index fc0211820d1a50..b3d5cd4978e7ac 100644 --- a/cloud/src/recycler/obj_storage_client.h +++ b/cloud/src/recycler/obj_storage_client.h @@ -30,9 +30,15 @@ struct ObjectStoragePathRef { }; struct ObjectStorageResponse { - ObjectStorageResponse(int r = 0, std::string msg = "") : ret(r), error_msg(std::move(msg)) {} + enum Code : int { + UNDEFINED = -1, + OK = 0, + NOT_FOUND = 1, + }; + + ObjectStorageResponse(int r = OK, std::string msg = "") : ret(r), error_msg(std::move(msg)) {} // clang-format off - int ret {0}; // To unify the error handle logic with BE, we'd better use the same error code as BE + int ret {OK}; // To unify the error handle logic with BE, we'd better use the same error code as BE // clang-format on std::string error_msg; }; diff --git a/cloud/src/recycler/recycler.cpp 
b/cloud/src/recycler/recycler.cpp index ca22b28e031c91..84d755958ee29c 100644 --- a/cloud/src/recycler/recycler.cpp +++ b/cloud/src/recycler/recycler.cpp @@ -40,6 +40,7 @@ #ifdef UNIT_TEST #include "../test/mock_accessor.h" #endif +#include "common/bvars.h" #include "common/config.h" #include "common/encryption_util.h" #include "common/logging.h" @@ -576,11 +577,11 @@ int InstanceRecycler::init() { template auto task_wrapper(Func... funcs) -> std::function { return [funcs...]() { - return [](std::initializer_list numbers) { + return [](std::initializer_list ret_vals) { int i = 0; - for (int num : numbers) { - if (num != 0) { - i = num; + for (int ret : ret_vals) { + if (ret != 0) { + i = ret; } } return i; @@ -597,11 +598,15 @@ int InstanceRecycler::do_recycle() { fmt::format("instance id {}", instance_id_), [](int r) { return r != 0; }); sync_executor - .add(task_wrapper( + .add(task_wrapper( // dropped table and dropped partition need to be recycled in series + // becase they may both recycle the same set of tablets + // recycle dropped table or idexes(mv, rollup) [this]() -> int { return InstanceRecycler::recycle_indexes(); }, - [this]() -> int { return InstanceRecycler::recycle_partitions(); }, - [this]() -> int { return InstanceRecycler::recycle_tmp_rowsets(); }, - [this]() -> int { return InstanceRecycler::recycle_rowsets(); })) + // recycle dropped partitions + [this]() -> int { return InstanceRecycler::recycle_partitions(); })) + .add(task_wrapper( + [this]() -> int { return InstanceRecycler::recycle_tmp_rowsets(); })) + .add(task_wrapper([this]() -> int { return InstanceRecycler::recycle_rowsets(); })) .add(task_wrapper( [this]() { return InstanceRecycler::abort_timeout_txn(); }, [this]() { return InstanceRecycler::recycle_expired_txn_label(); })) @@ -625,6 +630,11 @@ int InstanceRecycler::do_recycle() { } } +/** + * 1. delete all remote data + * 2. delete all kv + * 3. 
remove instance kv + */ int InstanceRecycler::recycle_deleted_instance() { LOG_INFO("begin to recycle deleted instance").tag("instance_id", instance_id_); @@ -638,6 +648,29 @@ int InstanceRecycler::recycle_deleted_instance() { << "s, instance_id=" << instance_id_; }); + // delete all remote data + for (auto& [_, accessor] : accessor_map_) { + if (stopped()) { + return ret; + } + + LOG(INFO) << "begin to delete all objects in " << accessor->uri(); + int del_ret = accessor->delete_all(); + if (del_ret == 0) { + LOG(INFO) << "successfully delete all objects in " << accessor->uri(); + } else if (del_ret != 1) { // no need to log, because S3Accessor has logged this error + // If `del_ret == 1`, it can be considered that the object data has been recycled by cloud platform, + // so the recycling has been successful. + ret = -1; + } + } + + if (ret != 0) { + LOG(WARNING) << "failed to delete all data of deleted instance=" << instance_id_; + return ret; + } + + // delete all kv std::unique_ptr txn; TxnErrorCode err = txn_kv_->create_txn(&txn); if (err != TxnErrorCode::TXN_OK) { @@ -681,22 +714,6 @@ int InstanceRecycler::recycle_deleted_instance() { ret = -1; } - for (auto& [_, accessor] : accessor_map_) { - if (stopped()) { - return ret; - } - - LOG(INFO) << "begin to delete all objects in " << accessor->uri(); - int del_ret = accessor->delete_all(); - if (del_ret == 0) { - LOG(INFO) << "successfully delete all objects in " << accessor->uri(); - } else if (del_ret != 1) { // no need to log, because S3Accessor has logged this error - // If `del_ret == 1`, it can be considered that the object data has been recycled by cloud platform, - // so the recycling has been successful. 
- ret = -1; - } - } - if (ret == 0) { // remove instance kv // ATTN: MUST ensure that cloud platform won't regenerate the same instance id @@ -721,9 +738,9 @@ int InstanceRecycler::recycle_deleted_instance() { int InstanceRecycler::recycle_indexes() { const std::string task_name = "recycle_indexes"; - int num_scanned = 0; - int num_expired = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_expired = 0; + int64_t num_recycled = 0; RecycleIndexKeyInfo index_key_info0 {instance_id_, 0}; RecycleIndexKeyInfo index_key_info1 {instance_id_, INT64_MAX}; @@ -748,9 +765,11 @@ int InstanceRecycler::recycle_indexes() { .tag("num_recycled", num_recycled); }); - auto calc_expiration = [](const RecycleIndexPB& index) -> int64_t { + int64_t earlest_ts = std::numeric_limits::max(); + + auto calc_expiration = [&earlest_ts, this](const RecycleIndexPB& index) { if (config::force_immediate_recycle) { - return 0; + return 0L; } int64_t expiration = index.expiration() > 0 ? index.expiration() : index.creation_time(); int64_t retention_seconds = config::retention_seconds; @@ -758,7 +777,12 @@ int InstanceRecycler::recycle_indexes() { retention_seconds = std::min(config::dropped_index_retention_seconds, retention_seconds); } - return expiration + retention_seconds; + int64_t final_expiration = expiration + retention_seconds; + if (earlest_ts > final_expiration) { + earlest_ts = final_expiration; + g_bvar_recycler_recycle_index_earlest_ts.put(instance_id_, earlest_ts); + } + return final_expiration; }; // Elements in `index_keys` has the same lifetime as `it` in `scan_and_recycle` @@ -919,9 +943,9 @@ bool check_lazy_txn_finished(std::shared_ptr txn_kv, const std::string in int InstanceRecycler::recycle_partitions() { const std::string task_name = "recycle_partitions"; - int num_scanned = 0; - int num_expired = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_expired = 0; + int64_t num_recycled = 0; RecyclePartKeyInfo part_key_info0 {instance_id_, 0}; 
RecyclePartKeyInfo part_key_info1 {instance_id_, INT64_MAX}; @@ -946,9 +970,11 @@ int InstanceRecycler::recycle_partitions() { .tag("num_recycled", num_recycled); }); - auto calc_expiration = [](const RecyclePartitionPB& partition) -> int64_t { + int64_t earlest_ts = std::numeric_limits::max(); + + auto calc_expiration = [&earlest_ts, this](const RecyclePartitionPB& partition) { if (config::force_immediate_recycle) { - return 0; + return 0L; } int64_t expiration = partition.expiration() > 0 ? partition.expiration() : partition.creation_time(); @@ -957,7 +983,12 @@ int InstanceRecycler::recycle_partitions() { retention_seconds = std::min(config::dropped_partition_retention_seconds, retention_seconds); } - return expiration + retention_seconds; + int64_t final_expiration = expiration + retention_seconds; + if (earlest_ts > final_expiration) { + earlest_ts = final_expiration; + g_bvar_recycler_recycle_partition_earlest_ts.put(instance_id_, earlest_ts); + } + return final_expiration; }; // Elements in `partition_keys` has the same lifetime as `it` in `scan_and_recycle` @@ -1074,8 +1105,8 @@ int InstanceRecycler::recycle_partitions() { } int InstanceRecycler::recycle_versions() { - int num_scanned = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_recycled = 0; LOG_INFO("begin to recycle table and partition versions").tag("instance_id", instance_id_); @@ -1152,13 +1183,14 @@ int InstanceRecycler::recycle_versions() { int InstanceRecycler::recycle_tablets(int64_t table_id, int64_t index_id, int64_t partition_id, bool is_empty_tablet) { - int num_scanned = 0; - std::atomic_int num_recycled = 0; + int64_t num_scanned = 0; + std::atomic_long num_recycled = 0; std::string tablet_key_begin, tablet_key_end; std::string stats_key_begin, stats_key_end; std::string job_key_begin, job_key_end; + std::string tablet_belongs; if (partition_id > 0) { // recycle tablets in a partition belonging to the index meta_tablet_key({instance_id_, table_id, index_id, 
partition_id, 0}, &tablet_key_begin); @@ -1167,6 +1199,7 @@ int InstanceRecycler::recycle_tablets(int64_t table_id, int64_t index_id, int64_ stats_tablet_key({instance_id_, table_id, index_id, partition_id + 1, 0}, &stats_key_end); job_tablet_key({instance_id_, table_id, index_id, partition_id, 0}, &job_key_begin); job_tablet_key({instance_id_, table_id, index_id, partition_id + 1, 0}, &job_key_end); + tablet_belongs = "partition"; } else { // recycle tablets in the index meta_tablet_key({instance_id_, table_id, index_id, 0, 0}, &tablet_key_begin); @@ -1175,9 +1208,10 @@ int InstanceRecycler::recycle_tablets(int64_t table_id, int64_t index_id, int64_ stats_tablet_key({instance_id_, table_id, index_id + 1, 0, 0}, &stats_key_end); job_tablet_key({instance_id_, table_id, index_id, 0, 0}, &job_key_begin); job_tablet_key({instance_id_, table_id, index_id + 1, 0, 0}, &job_key_end); + tablet_belongs = "index"; } - LOG_INFO("begin to recycle tablets") + LOG_INFO("begin to recycle tablets of the " + tablet_belongs) .tag("table_id", table_id) .tag("index_id", index_id) .tag("partition_id", partition_id); @@ -1186,7 +1220,7 @@ int InstanceRecycler::recycle_tablets(int64_t table_id, int64_t index_id, int64_ std::unique_ptr> defer_log_statistics((int*)0x01, [&](int*) { auto cost = duration(steady_clock::now() - start_time).count(); - LOG_INFO("recycle tablets finished, cost={}s", cost) + LOG_INFO("recycle tablets of " + tablet_belongs + " finished, cost={}s", cost) .tag("instance_id", instance_id_) .tag("table_id", table_id) .tag("index_id", index_id) @@ -1612,12 +1646,15 @@ int InstanceRecycler::recycle_tablet(int64_t tablet_id) { int InstanceRecycler::recycle_rowsets() { const std::string task_name = "recycle_rowsets"; - int num_scanned = 0; - int num_expired = 0; - int num_prepare = 0; - size_t total_rowset_size = 0; + int64_t num_scanned = 0; + int64_t num_expired = 0; + int64_t num_prepare = 0; + int64_t num_compacted = 0; + int64_t num_empty_rowset = 0; + size_t 
total_rowset_key_size = 0; + size_t total_rowset_value_size = 0; size_t expired_rowset_size = 0; - std::atomic_int num_recycled = 0; + std::atomic_long num_recycled = 0; RecycleRowsetKeyInfo recyc_rs_key_info0 {instance_id_, 0, ""}; RecycleRowsetKeyInfo recyc_rs_key_info1 {instance_id_, INT64_MAX, ""}; @@ -1640,8 +1677,11 @@ int InstanceRecycler::recycle_rowsets() { .tag("num_scanned", num_scanned) .tag("num_expired", num_expired) .tag("num_recycled", num_recycled) - .tag("num_prepare", num_prepare) - .tag("total_rowset_meta_size", total_rowset_size) + .tag("num_recycled.prepare", num_prepare) + .tag("num_recycled.compacted", num_compacted) + .tag("num_recycled.empty_rowset", num_empty_rowset) + .tag("total_rowset_meta_key_size_scanned", total_rowset_key_size) + .tag("total_rowset_meta_value_size_scanned", total_rowset_value_size) .tag("expired_rowset_meta_size", expired_rowset_size); }); @@ -1692,9 +1732,11 @@ int InstanceRecycler::recycle_rowsets() { return 0; }; - auto calc_expiration = [](const RecycleRowsetPB& rs) -> int64_t { + int64_t earlest_ts = std::numeric_limits::max(); + + auto calc_expiration = [&earlest_ts, this](const RecycleRowsetPB& rs) { if (config::force_immediate_recycle) { - return 0; + return 0L; } // RecycleRowsetPB created by compacted or dropped rowset has no expiration time, and will be recycled when exceed retention time int64_t expiration = rs.expiration() > 0 ? 
rs.expiration() : rs.creation_time(); @@ -1703,12 +1745,18 @@ int InstanceRecycler::recycle_rowsets() { retention_seconds = std::min(config::compacted_rowset_retention_seconds, retention_seconds); } - return expiration + retention_seconds; + int64_t final_expiration = expiration + retention_seconds; + if (earlest_ts > final_expiration) { + earlest_ts = final_expiration; + g_bvar_recycler_recycle_rowset_earlest_ts.put(instance_id_, earlest_ts); + } + return final_expiration; }; auto handle_rowset_kv = [&](std::string_view k, std::string_view v) -> int { ++num_scanned; - total_rowset_size += v.size(); + total_rowset_key_size += k.size(); + total_rowset_value_size += v.size(); RecycleRowsetPB rowset; if (!rowset.ParseFromArray(v.data(), v.size())) { LOG_WARNING("malformed recycle rowset").tag("key", hex(k)); @@ -1780,9 +1828,12 @@ int InstanceRecycler::recycle_rowsets() { return -1; } } else { + num_compacted += rowset.type() == RecycleRowsetPB::COMPACT; rowset_keys.emplace_back(k); if (rowset_meta->num_segments() > 0) { // Skip empty rowset rowsets.push_back(std::move(*rowset_meta)); + } else { + ++num_empty_rowset; } } return 0; @@ -1823,8 +1874,7 @@ int InstanceRecycler::recycle_rowsets() { return ret; } -bool check_txn_abort(std::shared_ptr txn_kv, const std::string& instance_id, - int64_t txn_id) { +bool is_txn_aborted(std::shared_ptr txn_kv, const std::string& instance_id, int64_t txn_id) { std::unique_ptr txn; TxnErrorCode err = txn_kv->create_txn(&txn); if (err != TxnErrorCode::TXN_OK) { @@ -1883,11 +1933,12 @@ bool check_txn_abort(std::shared_ptr txn_kv, const std::string& instance_ int InstanceRecycler::recycle_tmp_rowsets() { const std::string task_name = "recycle_tmp_rowsets"; - int num_scanned = 0; - int num_expired = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_expired = 0; + int64_t num_recycled = 0; size_t expired_rowset_size = 0; - size_t total_rowset_size = 0; + size_t total_rowset_key_size = 0; + size_t total_rowset_value_size 
= 0; MetaRowsetTmpKeyInfo tmp_rs_key_info0 {instance_id_, 0, 0}; MetaRowsetTmpKeyInfo tmp_rs_key_info1 {instance_id_, INT64_MAX, 0}; @@ -1910,41 +1961,54 @@ int InstanceRecycler::recycle_tmp_rowsets() { .tag("num_scanned", num_scanned) .tag("num_expired", num_expired) .tag("num_recycled", num_recycled) - .tag("total_rowset_meta_size", total_rowset_size) - .tag("expired_rowset_meta_size", expired_rowset_size); + .tag("total_rowset_meta_key_size_scanned", total_rowset_key_size) + .tag("total_rowset_meta_value_size_scanned", total_rowset_value_size) + .tag("expired_rowset_meta_size_recycled", expired_rowset_size); }); // Elements in `tmp_rowset_keys` has the same lifetime as `it` std::vector tmp_rowset_keys; std::vector tmp_rowsets; + int64_t earlest_ts = std::numeric_limits::max(); + auto calc_expiration = [&earlest_ts, this](const doris::RowsetMetaCloudPB& rowset) { + // ATTN: `txn_expiration` should > 0, however we use `creation_time` + a large `retention_time` (> 1 day in production environment) + // when `txn_expiration` <= 0 in some unexpected situation (usually when there are bugs). This is usually safe, coz loading + // duration or timeout always < `retention_time` in practice. + int64_t expiration = + rowset.txn_expiration() > 0 ? rowset.txn_expiration() : rowset.creation_time(); + expiration = config::force_immediate_recycle ? 
0 : expiration; + int64_t final_expiration = expiration + config::retention_seconds; + if (earlest_ts > final_expiration) { + earlest_ts = final_expiration; + g_bvar_recycler_recycle_tmp_rowset_earlest_ts.put(instance_id_, earlest_ts); + } + return final_expiration; + }; + auto handle_rowset_kv = [&num_scanned, &num_expired, &tmp_rowset_keys, &tmp_rowsets, - &expired_rowset_size, &total_rowset_size, + &expired_rowset_size, &total_rowset_key_size, &total_rowset_value_size, + &calc_expiration, this](std::string_view k, std::string_view v) -> int { ++num_scanned; - total_rowset_size += v.size(); + total_rowset_key_size += k.size(); + total_rowset_value_size += v.size(); doris::RowsetMetaCloudPB rowset; if (!rowset.ParseFromArray(v.data(), v.size())) { LOG_WARNING("malformed rowset meta").tag("key", hex(k)); return -1; } - int64_t current_time = ::time(nullptr); - // ATTN: `txn_expiration` should > 0, however we use `creation_time` + a large `retention_time` (> 1 day in production environment) - // when `txn_expiration` <= 0 in some unexpected situation (usually when there are bugs). This is usually safe, coz loading - // duration or timeout always < `retention_time` in practice. - int64_t expiration = config::force_immediate_recycle ? 0 - : rowset.txn_expiration() > 0 ? 
rowset.txn_expiration() - : rowset.creation_time(); + int64_t expiration = calc_expiration(rowset); VLOG_DEBUG << "recycle tmp rowset scan, key=" << hex(k) << " num_scanned=" << num_scanned << " num_expired=" << num_expired << " expiration=" << expiration << " txn_expiration=" << rowset.txn_expiration() << " rowset_creation_time=" << rowset.creation_time(); - if (current_time < expiration + config::retention_seconds) { - // not expired + int64_t current_time = ::time(nullptr); + if (current_time < expiration) { // not expired return 0; } - if (!check_txn_abort(txn_kv_, instance_id_, rowset.txn_id())) { + if (!is_txn_aborted(txn_kv_, instance_id_, rowset.txn_id())) { return 0; } @@ -1964,7 +2028,9 @@ int InstanceRecycler::recycle_tmp_rowsets() { << " tablet_id=" << rowset.tablet_id() << " rowset_id=" << rowset.rowset_id_v2() << " version=[" << rowset.start_version() << '-' << rowset.end_version() << "] txn_id=" << rowset.txn_id() << " rowset_meta_size=" << v.size() - << " creation_time" << rowset.creation_time(); + << " creation_time=" << rowset.creation_time() << " num_scanned=" << num_scanned + << " num_expired=" << num_expired; + tmp_rowset_keys.push_back(k); if (rowset.num_segments() > 0) { // Skip empty rowset tmp_rowsets.push_back(std::move(rowset)); @@ -1997,31 +2063,57 @@ int InstanceRecycler::scan_and_recycle( std::string begin, std::string_view end, std::function recycle_func, std::function loop_done) { + LOG(INFO) << "begin scan_and_recycle key_range=[" << hex(begin) << "," << hex(end) << ")"; int ret = 0; + int64_t cnt = 0; + int get_range_retried = 0; + std::string err; + std::unique_ptr> defer_log( + (int*)0x01, [begin, end, &err, &ret, &cnt, &get_range_retried](int*) { + LOG(INFO) << "finish scan_and_recycle key_range=[" << hex(begin) << "," << hex(end) + << ") num_scanned=" << cnt << " get_range_retried=" << get_range_retried + << " ret=" << ret << " err=" << err; + }); + std::unique_ptr it; do { - int get_ret = txn_get(txn_kv_.get(), begin, end, 
it); - if (get_ret != 0) { - LOG(WARNING) << "failed to get kv, key=" << begin << " ret=" << get_ret; + if (get_range_retried > 1000) { + err = "txn_get exceeds max retry, may not scan all keys"; + ret = -1; return -1; } - VLOG_DEBUG << "fetch " << it->size() << " kv"; + int get_ret = txn_get(txn_kv_.get(), begin, end, it); + if (get_ret != 0) { // txn kv may complain "Request for future version" + LOG(WARNING) << "failed to get kv, range=[" << hex(begin) << "," << hex(end) + << ") num_scanned=" << cnt << " txn_get_ret=" << get_ret + << " get_range_retried=" << get_range_retried; + ++get_range_retried; + std::this_thread::sleep_for(std::chrono::milliseconds(500)); + continue; // try again + } if (!it->has_next()) { - VLOG_DEBUG << "no keys in the given range, begin=" << hex(begin) << " end=" << hex(end); - break; + LOG(INFO) << "no keys in the given range=[" << hex(begin) << "," << hex(end) << ")"; + break; // scan finished } while (it->has_next()) { + ++cnt; // recycle corresponding resources auto [k, v] = it->next(); if (!it->has_next()) { begin = k; VLOG_DEBUG << "iterator has no more kvs. 
key=" << hex(k); } - if (recycle_func(k, v) != 0) ret = -1; + // if we want to continue scanning, the recycle_func should not return non-zero + if (recycle_func(k, v) != 0) { + err = "recycle_func error"; + ret = -1; + } } begin.push_back('\x00'); // Update to next smallest key for iteration - if (loop_done) { - if (loop_done() != 0) ret = -1; + // if we want to continue scanning, the recycle_func should not return non-zero + if (loop_done && loop_done() != 0) { + err = "loop_done error"; + ret = -1; } } while (it->more() && !stopped()); return ret; @@ -2029,10 +2121,10 @@ int InstanceRecycler::scan_and_recycle( int InstanceRecycler::abort_timeout_txn() { const std::string task_name = "abort_timeout_txn"; - int num_scanned = 0; - int num_timeout = 0; - int num_abort = 0; - int num_advance = 0; + int64_t num_scanned = 0; + int64_t num_timeout = 0; + int64_t num_abort = 0; + int64_t num_advance = 0; TxnRunningKeyInfo txn_running_key_info0 {instance_id_, 0, 0}; TxnRunningKeyInfo txn_running_key_info1 {instance_id_, INT64_MAX, INT64_MAX}; @@ -2169,9 +2261,9 @@ int InstanceRecycler::abort_timeout_txn() { int InstanceRecycler::recycle_expired_txn_label() { const std::string task_name = "recycle_expired_txn_label"; - int num_scanned = 0; - int num_expired = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_expired = 0; + int64_t num_recycled = 0; RecycleTxnKeyInfo recycle_txn_key_info0 {instance_id_, 0, 0}; RecycleTxnKeyInfo recycle_txn_key_info1 {instance_id_, INT64_MAX, INT64_MAX}; @@ -2195,11 +2287,23 @@ int InstanceRecycler::recycle_expired_txn_label() { .tag("num_recycled", num_recycled); }); - int64_t current_time = + int64_t earlest_ts = std::numeric_limits::max(); + auto calc_expiration = [&earlest_ts, this](const RecycleTxnPB& recycle_txn_pb) { + int64_t final_expiration = + recycle_txn_pb.creation_time() + config::label_keep_max_second * 1000L; + if (earlest_ts > final_expiration / 1000) { + earlest_ts = final_expiration / 1000; + 
g_bvar_recycler_recycle_expired_txn_label_earlest_ts.put(instance_id_, earlest_ts); + } + return final_expiration; + }; + + int64_t current_time_ms = duration_cast(system_clock::now().time_since_epoch()).count(); - auto handle_recycle_txn_kv = [&num_scanned, &num_expired, &num_recycled, ¤t_time, this]( - std::string_view k, std::string_view v) -> int { + auto handle_recycle_txn_kv = [&num_scanned, &num_expired, &num_recycled, ¤t_time_ms, + &calc_expiration, + this](std::string_view k, std::string_view v) -> int { ++num_scanned; RecycleTxnPB recycle_txn_pb; if (!recycle_txn_pb.ParseFromArray(v.data(), v.size())) { @@ -2208,13 +2312,13 @@ int InstanceRecycler::recycle_expired_txn_label() { } if ((config::force_immediate_recycle) || (recycle_txn_pb.has_immediate() && recycle_txn_pb.immediate()) || - (recycle_txn_pb.creation_time() + config::label_keep_max_second * 1000L <= - current_time)) { - LOG_INFO("found recycle txn").tag("key", hex(k)); + (calc_expiration(recycle_txn_pb) <= current_time_ms)) { + VLOG_DEBUG << "found recycle txn, key=" << hex(k); num_expired++; } else { return 0; } + std::string_view k1 = k; //RecycleTxnKeyInfo 0:instance_id 1:db_id 2:txn_id k1.remove_prefix(1); // Remove key space @@ -2414,10 +2518,10 @@ struct BatchObjStoreAccessor { }; int InstanceRecycler::recycle_copy_jobs() { - int num_scanned = 0; - int num_finished = 0; - int num_expired = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_finished = 0; + int64_t num_expired = 0; + int64_t num_recycled = 0; // Used for INTERNAL stage's copy jobs to tag each batch for log trace uint64_t batch_count = 0; const std::string task_name = "recycle_copy_jobs"; @@ -2659,8 +2763,8 @@ int InstanceRecycler::init_copy_job_accessor(const std::string& stage_id, } int InstanceRecycler::recycle_stage() { - int num_scanned = 0; - int num_recycled = 0; + int64_t num_scanned = 0; + int64_t num_recycled = 0; const std::string task_name = "recycle_stage"; LOG_INFO("begin to recycle 
stage").tag("instance_id", instance_id_); @@ -2680,12 +2784,9 @@ int InstanceRecycler::recycle_stage() { RecycleStageKeyInfo key_info0 {instance_id_, ""}; RecycleStageKeyInfo key_info1 {instance_id_, "\xff"}; - std::string key0; - std::string key1; - recycle_stage_key(key_info0, &key0); - recycle_stage_key(key_info1, &key1); + std::string key0 = recycle_stage_key(key_info0); + std::string key1 = recycle_stage_key(key_info1); - // Elements in `tmp_rowset_keys` has the same lifetime as `it` std::vector stage_keys; auto recycle_func = [&start_time, &num_scanned, &num_recycled, &stage_keys, this]( std::string_view k, std::string_view v) -> int { @@ -2775,6 +2876,12 @@ int InstanceRecycler::recycle_expired_stage_objects() { }); int ret = 0; for (const auto& stage : instance_info_.stages()) { + std::stringstream ss; + ss << "instance_id=" << instance_id_ << ", stage_id=" << stage.stage_id() + << ", user_name=" << stage.mysql_user_name().at(0) + << ", user_id=" << stage.mysql_user_id().at(0) + << ", prefix=" << stage.obj_info().prefix(); + if (stopped()) break; if (stage.type() == StagePB::EXTERNAL) { continue; @@ -2788,7 +2895,7 @@ int InstanceRecycler::recycle_expired_stage_objects() { const auto& old_obj = instance_info_.obj_info()[idx - 1]; auto s3_conf = S3Conf::from_obj_store_info(old_obj); if (!s3_conf) { - LOG(WARNING) << "failed to init accessor"; + LOG(WARNING) << "failed to init s3_conf with obj_info=" << old_obj.DebugString(); continue; } @@ -2796,16 +2903,18 @@ int InstanceRecycler::recycle_expired_stage_objects() { std::shared_ptr accessor; int ret1 = S3Accessor::create(std::move(*s3_conf), &accessor); if (ret1 != 0) { - LOG(WARNING) << "failed to init s3 accessor ret=" << ret1; + LOG(WARNING) << "failed to init s3 accessor ret=" << ret1 << " " << ss.str(); + ret = -1; + continue; + } + + if (s3_conf->prefix.find("/stage/") == std::string::npos) { + LOG(WARNING) << "try to delete illegal prefix, which is catastrophic, " << ss.str(); ret = -1; continue; } - 
LOG(INFO) << "recycle expired stage objects, instance_id=" << instance_id_ - << ", stage_id=" << stage.stage_id() - << ", user_name=" << stage.mysql_user_name().at(0) - << ", user_id=" << stage.mysql_user_id().at(0) - << ", prefix=" << stage.obj_info().prefix(); + LOG(INFO) << "recycle expired stage objects, " << ss.str(); int64_t expiration_time = duration_cast(system_clock::now().time_since_epoch()).count() - config::internal_stage_objects_expire_time_second; @@ -2814,8 +2923,8 @@ int InstanceRecycler::recycle_expired_stage_objects() { } ret1 = accessor->delete_all(expiration_time); if (ret1 != 0) { - LOG(WARNING) << "failed to recycle expired stage objects, instance_id=" << instance_id_ - << ", stage_id=" << stage.stage_id() << ", ret=" << ret1; + LOG(WARNING) << "failed to recycle expired stage objects, ret=" << ret1 << " " + << ss.str(); ret = -1; continue; } diff --git a/cloud/src/recycler/recycler.h b/cloud/src/recycler/recycler.h index 91a461f474faed..cf23dcacd2fdca 100644 --- a/cloud/src/recycler/recycler.h +++ b/cloud/src/recycler/recycler.h @@ -55,7 +55,9 @@ struct RecyclerThreadPoolGroup { RecyclerThreadPoolGroup& operator=(RecyclerThreadPoolGroup& other) = default; RecyclerThreadPoolGroup& operator=(RecyclerThreadPoolGroup&& other) = default; RecyclerThreadPoolGroup(RecyclerThreadPoolGroup&&) = default; + // used for accessor.delete_files, accessor.delete_directory std::shared_ptr s3_producer_pool; + // used for InstanceRecycler::recycle_tablet std::shared_ptr recycle_tablet_pool; std::shared_ptr group_recycle_function_pool; }; @@ -128,19 +130,26 @@ class InstanceRecycler { // returns 0 for success otherwise error int recycle_deleted_instance(); - // scan and recycle expired indexes + // scan and recycle expired indexes: + // 1. dropped table, dropped mv + // 2. half-successtable/index when create // returns 0 for success otherwise error int recycle_indexes(); - // scan and recycle expired partitions + // scan and recycle expired partitions: + // 1. 
dropped parttion + // 2. half-success partition when create // returns 0 for success otherwise error int recycle_partitions(); - // scan and recycle expired rowsets + // scan and recycle expired rowsets: + // 1. prepare_rowset will produce recycle_rowset before uploading data to remote storage (memo) + // 2. compaction will change the input rowsets to recycle_rowset // returns 0 for success otherwise error int recycle_rowsets(); - // scan and recycle expired tmp rowsets + // scan and recycle expired tmp rowsets: + // 1. commit_rowset will produce tmp_rowset when finish upload data (load or compaction) to remote storage // returns 0 for success otherwise error int recycle_tmp_rowsets(); @@ -203,12 +212,15 @@ class InstanceRecycler { int scan_and_recycle(std::string begin, std::string_view end, std::function recycle_func, std::function loop_done = nullptr); + // return 0 for success otherwise error int delete_rowset_data(const doris::RowsetMetaCloudPB& rs_meta_pb); + // return 0 for success otherwise error // NOTE: this function ONLY be called when the file paths cannot be calculated int delete_rowset_data(const std::string& resource_id, int64_t tablet_id, const std::string& rowset_id); + // return 0 for success otherwise error int delete_rowset_data(const std::vector& rowsets); diff --git a/cloud/src/recycler/s3_accessor.cpp b/cloud/src/recycler/s3_accessor.cpp index 1aca88d2d1161d..224b36c277c532 100644 --- a/cloud/src/recycler/s3_accessor.cpp +++ b/cloud/src/recycler/s3_accessor.cpp @@ -282,6 +282,11 @@ int S3Accessor::init() { Aws::Client::ClientConfiguration aws_config; aws_config.endpointOverride = conf_.endpoint; aws_config.region = conf_.region; + // Aws::Http::CurlHandleContainer::AcquireCurlHandle() may be blocked if the connecitons are bottleneck + aws_config.maxConnections = std::max((long)(config::recycle_pool_parallelism + + config::instance_recycler_worker_pool_size), + (long)aws_config.maxConnections); + if (config::s3_client_http_scheme == "http") { 
aws_config.scheme = Aws::Http::Scheme::HTTP; } @@ -349,7 +354,12 @@ int S3Accessor::delete_files(const std::vector& paths) { int S3Accessor::delete_file(const std::string& path) { LOG_INFO("delete file").tag("uri", to_uri(path)); - return obj_client_->delete_object({.bucket = conf_.bucket, .key = get_key(path)}).ret; + int ret = obj_client_->delete_object({.bucket = conf_.bucket, .key = get_key(path)}).ret; + static_assert(ObjectStorageResponse::OK == 0); + if (ret == ObjectStorageResponse::OK || ret == ObjectStorageResponse::NOT_FOUND) { + return 0; + } + return ret; } int S3Accessor::put_file(const std::string& path, const std::string& content) { @@ -392,21 +402,45 @@ int S3Accessor::check_versioning() { } int GcsAccessor::delete_prefix_impl(const std::string& path_prefix, int64_t expiration_time) { - LOG_INFO("delete prefix").tag("uri", to_uri(path_prefix)); + LOG_INFO("begin delete prefix").tag("uri", to_uri(path_prefix)); int ret = 0; + int cnt = 0; + int skip = 0; + int64_t del_nonexisted = 0; + int del = 0; auto iter = obj_client_->list_objects({conf_.bucket, get_key(path_prefix)}); for (auto obj = iter->next(); obj.has_value(); obj = iter->next()) { + if (!(++cnt % 100)) { + LOG_INFO("loop delete prefix") + .tag("uri", to_uri(path_prefix)) + .tag("total_obj_cnt", cnt) + .tag("deleted", del) + .tag("del_nonexisted", del_nonexisted) + .tag("skipped", skip); + } if (expiration_time > 0 && obj->mtime_s > expiration_time) { + skip++; continue; } + del++; - // FIXME(plat1ko): Delete objects by batch - if (int del_ret = obj_client_->delete_object({conf_.bucket, obj->key}).ret; del_ret != 0) { + // FIXME(plat1ko): Delete objects by batch with genuine GCS client + int del_ret = obj_client_->delete_object({conf_.bucket, obj->key}).ret; + del_nonexisted += (del_ret == ObjectStorageResponse::NOT_FOUND); + static_assert(ObjectStorageResponse::OK == 0); + if (del_ret != ObjectStorageResponse::OK && del_ret != ObjectStorageResponse::NOT_FOUND) { ret = del_ret; } } + 
LOG_INFO("finish delete prefix") + .tag("uri", to_uri(path_prefix)) + .tag("total_obj_cnt", cnt) + .tag("deleted", del) + .tag("del_nonexisted", del_nonexisted) + .tag("skipped", skip); + if (!iter->is_valid()) { return -1; } diff --git a/cloud/src/recycler/s3_obj_client.cpp b/cloud/src/recycler/s3_obj_client.cpp index c8dcdad18d7115..0e548819d25ce4 100644 --- a/cloud/src/recycler/s3_obj_client.cpp +++ b/cloud/src/recycler/s3_obj_client.cpp @@ -293,9 +293,12 @@ ObjectStorageResponse S3ObjClient::delete_object(ObjectStoragePathRef path) { .tag("responseCode", static_cast(outcome.GetError().GetResponseCode())) .tag("error", outcome.GetError().GetMessage()) .tag("exception", outcome.GetError().GetExceptionName()); - return -1; + if (outcome.GetError().GetResponseCode() == Aws::Http::HttpResponseCode::NOT_FOUND) { + return {ObjectStorageResponse::NOT_FOUND, outcome.GetError().GetMessage()}; + } + return {ObjectStorageResponse::UNDEFINED, outcome.GetError().GetMessage()}; } - return 0; + return {ObjectStorageResponse::OK}; } ObjectStorageResponse S3ObjClient::delete_objects_recursively(ObjectStoragePathRef path,